@@ -1013,19 +1013,13 @@ def rec_deps(services, service_name, start_point=None):
10131013 return deps
10141014
10151015
1016- def flat_deps (services , with_extends = False ):
1016+ def flat_deps (services ):
10171017 """
10181018 create dependencies "_deps" or update it recursively for all services
10191019 """
10201020 for name , srv in services .items ():
10211021 deps = set ()
10221022 srv ["_deps" ] = deps
1023- if with_extends :
1024- ext = srv .get ("extends" , {}).get ("service" , None )
1025- if ext :
1026- if ext != name :
1027- deps .add (ext )
1028- continue
10291023 deps_ls = srv .get ("depends_on" , None ) or []
10301024 if is_str (deps_ls ):
10311025 deps_ls = [deps_ls ]
@@ -1211,7 +1205,9 @@ def rec_merge_one(target, source):
12111205 raise ValueError (
12121206 f"can't merge value of { key } of type { value_type } and { value2_type } "
12131207 )
1214- if is_list (value2 ):
1208+ if type (value ) == set :
1209+ value = value .update (value2 )
1210+ elif is_list (value2 ):
12151211 if key == "volumes" :
12161212 # clean duplicate mount targets
12171213 pts = {v .split (":" , 1 )[1 ] for v in value2 if ":" in v }
@@ -1241,36 +1237,179 @@ def rec_merge(target, *sources):
12411237 return ret
12421238
12431239
1244- def resolve_extends (services , service_names , environ ):
1245- for name in service_names :
1246- service = services [name ]
1247- ext = service .get ("extends" , {})
1248- if is_str (ext ):
1249- ext = {"service" : ext }
1250- from_service_name = ext .get ("service" , None )
1251- if not from_service_name :
1252- continue
1253- filename = ext .get ("file" , None )
1254- if filename :
1255- if filename .startswith ("./" ):
1256- filename = filename [2 :]
class ComposeFileParsingException(Exception):
    """Base error raised when a compose file cannot be parsed or resolved."""
1242+
class ComposeFileParsingCircularDependencyException(ComposeFileParsingException):
    """Raised when a chain of `extends` references loops back on itself."""
1245+
def pretty_print_tuple(tup, data):
    """Render a (filename, service_name) identifier for error messages.

    A filename of None stands for the merged top-level compose files, which
    are displayed as the comma-joined list *data* instead.
    """
    filename, service = tup
    if filename is not None:
        return filename, service
    return ",".join(data), service
1251+
class OrderedSet():
    """A set that also remembers insertion order.

    Used as a stack while resolving `extends` chains: `add`/`pop` maintain the
    current resolution path (for readable circular-dependency reports) while
    `__contains__` stays O(1) via the backing set.
    """

    def __init__(self):
        self.as_list = list()
        self.as_set = set()

    def pop(self):
        """Remove and return the most recently added element.

        FIX: the original dropped the popped value; return it per the usual
        `pop` convention (callers that ignore the result are unaffected).
        """
        r = self.as_list.pop()
        self.as_set.remove(r)
        return r

    def add(self, element):
        """Append *element*; element must be hashable."""
        self.as_list.append(element)
        self.as_set.add(element)

    def __contains__(self, element):
        # O(1) membership via the set, not the list
        return element in self.as_set

    def __str__(self):
        return str(self.as_list)

    def pretty_print(self, data):
        """Return the elements as human-readable (file, service) pairs."""
        return [pretty_print_tuple(tup, data) for tup in self.as_list]
1276+
class CachedComposeFileParser():
    """
    Parse, merge and cache compose files, then recursively resolve the
    "extends" field of every service so the final definitions are
    self-contained.

    The top-level files passed to the script are merged into a single
    document (``merged_yaml``); any extra file referenced through
    ``extends.file`` is read at most once and kept in ``cache``.
    # TODO: make all parsing recursive and move it to this class
    """

    def __init__(self, environ, files):
        # environ: mapping used for variable substitution (rec_subs)
        self.environ = environ
        self.merged_yaml = {}
        # realpath so the same file reached via different relative paths
        # is recognized as already merged / already cached
        self.merged_files = [os.path.realpath(file) for file in files]
        self.cache = dict()

    def generate_compose(self):
        """Load, normalize, env-substitute and merge all top-level files.

        Returns the merged document. Raises ComposeFileParsingException when
        a file does not contain a top-level mapping.
        """
        for filename in self.merged_files:
            with open(filename, "r", encoding="utf-8") as f:
                content = yaml.safe_load(f)
                # log(filename, json.dumps(content, indent = 2))
                if not isinstance(content, dict):
                    raise ComposeFileParsingException(
                        "Compose file does not contain a top level object: %s\n"
                        % filename
                    )
                content = normalize(content)
                # log(filename, json.dumps(content, indent = 2))
                content = rec_subs(content, self.environ)
                rec_merge(self.merged_yaml, content)
        return self.merged_yaml

    def read_file_and_cache_it(self, filename):
        """Return the parsed content of *filename*, reading it at most once.

        The merged top-level files are treated as one pre-parsed document.
        """
        real_file = os.path.realpath(filename)
        if real_file in self.merged_files:
            return self.merged_yaml
        if real_file not in self.cache:
            with open(filename, "r", encoding="utf-8") as f:
                content = yaml.safe_load(f) or dict()
            if "services" in content:
                services = content["services"]
                services = rec_subs(services, self.environ)
                content['services'] = services
            else:
                # FIX: guarantee a "services" mapping so later lookups report
                # a missing service instead of raising a cryptic KeyError
                content['services'] = dict()
            self.cache[real_file] = content
        return self.cache[real_file]

    def _pretty_file(self, name):
        """Human-readable file name; None stands for the merged files."""
        if name is None:
            # merged docker-compose files passed to script
            # are treated as a single file
            return ",".join(self.merged_files)
        return name

    def _service_not_found(self, service_name, parent_service_name, parent_filename):
        """Raise ComposeFileParsingException for a missing extends target."""
        pretty_file = self._pretty_file(parent_filename)
        # FIX: balanced the quotes around the parent service name
        msg = f"Service {service_name} has dependency of '{parent_service_name}' " \
              f"which does not exist in {pretty_file}"
        raise ComposeFileParsingException(msg)

    def resolve_extend(self, services, service, service_name, current_filename,
                       circular_dep_detector):
        """Recursively resolve the "extends" field of *service* in place.

        Returns the fully merged service definition (also stored back into
        *services*). Raises ComposeFileParsingCircularDependencyException on
        a cyclic extends chain, ComposeFileParsingException on a malformed or
        unresolvable one.
        """
        def patch_parent_filename_if_default(parent_filename):
            # resolve a relative "file:" against the directory of the compose
            # file currently being processed (first merged file by default)
            if os.path.isabs(parent_filename):
                return parent_filename
            temp_name = current_filename
            if temp_name is None:
                temp_name = self.merged_files[0]
            return os.path.join(os.path.dirname(temp_name), parent_filename)

        if current_filename is not None:
            current_filename = os.path.realpath(current_filename)
            if current_filename in self.merged_files:
                # the merged top-level files count as a single anonymous file
                current_filename = None
        service_unique_identifier = (current_filename, service_name,)
        if service_unique_identifier in circular_dep_detector:
            msg = f"Circular dependency to {pretty_print_tuple(service_unique_identifier, self.merged_files)} " \
                  f"detected: {circular_dep_detector.pretty_print(self.merged_files)}"
            raise ComposeFileParsingCircularDependencyException(msg)
        circular_dep_detector.add(service_unique_identifier)
        try:
            extends_section = service.get("extends")
            if extends_section is None:
                return service

            if is_str(extends_section):
                # FIX: canonicalize the short form "extends: name" to a dict,
                # so the .get("file") lookup below cannot AttributeError
                extends_section = {"service": extends_section}
            parent_service_name = extends_section.get("service")

            if parent_service_name is None:
                pretty_name = self._pretty_file(current_filename)
                # FIX: added the missing space between "has" and "extends"
                raise ComposeFileParsingException(f"Service {service_name} in {pretty_name} has "
                                                  f"extends field and no service name")
            parent_filename = extends_section.get("file")
            if parent_filename:
                if parent_filename.startswith("./"):
                    parent_filename = parent_filename[2:]
                subdirectory = os.path.dirname(parent_filename)
                parent_filename = patch_parent_filename_if_default(parent_filename)
                content = self.read_file_and_cache_it(parent_filename)
                # ADDED: normalize each service later
                from_service_ref = content['services'].get(parent_service_name)
                if from_service_ref is None:
                    self._service_not_found(service_name, parent_service_name, parent_filename)
                from_service_ref = self.resolve_extend(content['services'], from_service_ref,
                                                       parent_service_name, parent_filename,
                                                       circular_dep_detector)
                from_service_ref = normalize_service(from_service_ref, subdirectory)
                # FIX: store the resolved parent back under "services", not at
                # the top level of the cached document
                content['services'][parent_service_name] = from_service_ref
                from_service = from_service_ref.copy()
            else:
                # parent lives in the same document
                mutable_parent_service = services.get(parent_service_name)
                if mutable_parent_service is None:
                    self._service_not_found(service_name, parent_service_name, current_filename)
                from_service_ref = self.resolve_extend(services, mutable_parent_service,
                                                       parent_service_name, current_filename,
                                                       circular_dep_detector)
                from_service = from_service_ref.copy()
            assert 'extends' in service  # ensure same service is not processed twice
            del service['extends']
            normalize_service(service)
            services[service_name] = rec_merge(dict(), from_service, service)
            assert services[service_name] is not None
        finally:
            circular_dep_detector.pop()
        return services[service_name]

    def parse_services(self):
        """Resolve "extends" for every merged service and compute "_deps"."""
        # if current_filename is None it means
        # we are handling merged compose file
        current_filename = ",".join(self.merged_files)
        services = self.merged_yaml.get("services", None)
        if services is None:
            services = {}
            log(f"WARNING: No services defined in {current_filename}")
        for service_name in services.keys():
            services[service_name] = self.resolve_extend(services, services[service_name],
                                                         service_name, current_filename,
                                                         OrderedSet())
            assert services[service_name] is not None
        flat_deps(services)
        return services
12741413
12751414
12761415def dotenv_to_dict (dotenv_path ):
@@ -1444,21 +1583,10 @@ def _parse_compose_file(self):
14441583 "COMPOSE_PATH_SEPARATOR" : pathsep ,
14451584 }
14461585 )
1447- compose = {}
1448- for filename in files :
1449- with open (filename , "r" , encoding = "utf-8" ) as f :
1450- content = yaml .safe_load (f )
1451- # log(filename, json.dumps(content, indent = 2))
1452- if not isinstance (content , dict ):
1453- sys .stderr .write (
1454- "Compose file does not contain a top level object: %s\n "
1455- % filename
1456- )
1457- sys .exit (1 )
1458- content = normalize (content )
1459- # log(filename, json.dumps(content, indent = 2))
1460- content = rec_subs (content , self .environ )
1461- rec_merge (compose , content )
1586+
1587+ parser = CachedComposeFileParser (self .environ , files )
1588+ compose = parser .generate_compose ()
1589+
14621590 self .merged_yaml = yaml .safe_dump (compose )
14631591 merged_json_b = json .dumps (compose , separators = ("," , ":" )).encode ("utf-8" )
14641592 self .yaml_hash = hashlib .sha256 (merged_json_b ).hexdigest ()
@@ -1484,19 +1612,8 @@ def _parse_compose_file(self):
14841612 self .project_name = project_name
14851613 self .environ .update ({"COMPOSE_PROJECT_NAME" : self .project_name })
14861614
1487- services = compose .get ("services" , None )
1488- if services is None :
1489- services = {}
1490- log ("WARNING: No services defined" )
1615+ services = parser .parse_services ()
14911616
1492- # NOTE: maybe add "extends.service" to _deps at this stage
1493- flat_deps (services , with_extends = True )
1494- service_names = sorted (
1495- [(len (srv ["_deps" ]), name ) for name , srv in services .items ()]
1496- )
1497- service_names = [name for _ , name in service_names ]
1498- resolve_extends (services , service_names , self .environ )
1499- flat_deps (services )
15001617 service_names = sorted (
15011618 [(len (srv ["_deps" ]), name ) for name , srv in services .items ()]
15021619 )
@@ -1616,6 +1733,7 @@ def _parse_args(self):
16161733 if not self .global_args .command or self .global_args .command == "help" :
16171734 parser .print_help ()
16181735 sys .exit (- 1 )
1736+ print (self .global_args )
16191737 return self .global_args
16201738
16211739 @staticmethod
0 commit comments