|
19 | 19 |
|
class PackageSearch:
    # Consolidated package records; populated per-instance in get_instance().
    package_data = {}
    # Cache of large search results, keyed by search term/flags (see searchPackages).
    local_cache = {}
    # Cache keys in insertion order, oldest first — used for eviction when full.
    cache_keys = []
    # Maps distro name -> version -> bit flag; filled via loadSupportedDistros().
    DISTRO_BIT_MAP = {}
    # Singleton instance managed by get_instance().
    INSTANCE = None
26 | 24 |
|
@@ -52,84 +50,12 @@ def loadSupportedDistros(cls): |
52 | 50 | bitFlag += bitFlag |
53 | 51 | return cls.DISTRO_BIT_MAP |
54 | 52 |
|
55 | | - @classmethod |
56 | | - def loadPackageData(cls): |
57 | | - ''' |
58 | | - Returns list of Packages in software-discovery-tool |
59 | | - ''' |
60 | | - |
61 | | - LOGGER.debug('loadPackageData: In loadSupportedDistros') |
62 | | - distro_data_file = '%s/cached_data.json' % cls.getDataFilePath() |
63 | | - try: |
64 | | - json_data = json.load(open(distro_data_file)) |
65 | | - except: |
66 | | - LOGGER.warn('loadPackageData: Loading cached distros data failed generating from scratch') |
67 | | - LOGGER.debug('loadPackageData: start writing distros data') |
68 | | - json_data = cls.preparePackageData() |
69 | | - cached_file = open(distro_data_file, 'w') |
70 | | - cached_file.write(json.dumps(json_data, indent=2, separators=(',', ': '))) |
71 | | - cached_file.close() |
72 | | - LOGGER.debug('loadPackageData: end writing distros data') |
73 | | - |
74 | | - LOGGER.debug('loadPackageData: Loading supported distros data') |
75 | | - |
76 | | - return json_data |
77 | | - |
78 | | - @classmethod |
79 | | - def preparePackageData(cls): |
80 | | - data_dir = cls.getDataFilePath() |
81 | | - package_info = []; |
82 | | - package_data = {}; |
83 | | - cachedPackage = {} |
84 | | - |
85 | | - for distroName in list(SUPPORTED_DISTROS.keys()): |
86 | | - for distroVersion in sorted(SUPPORTED_DISTROS[distroName].keys()): |
87 | | - distro_file = SUPPORTED_DISTROS[distroName][distroVersion] |
88 | | - |
89 | | - package_info = json.load(open('%s/%s' % (data_dir, distro_file))) |
90 | | - distro_file_name = distro_file |
91 | | - |
92 | | - for pkg in package_info: |
93 | | - try: |
94 | | - pkg_key = pkg["packageName"] + '_' + pkg["version"] |
95 | | - except Exception as ex: |
96 | | - LOGGER.error('preparePackageData: key not found for package %s' % str(ex)) |
97 | | - if pkg_key not in package_data: |
98 | | - cachedPackage = {} |
99 | | - cachedPackage["P"] = pkg["packageName"] |
100 | | - cachedPackage["S"] = cachedPackage["P"].lower().upper() |
101 | | - cachedPackage["V"] = pkg["version"] |
102 | | - if "description" in pkg: |
103 | | - cachedPackage["D"] = pkg["description"] |
104 | | - try: |
105 | | - cachedPackage["B"] = cls.DISTRO_BIT_MAP[distroName][distroVersion] |
106 | | - except Exception as e: |
107 | | - raise #This occurrs only if there is a problem with how SUPPORTED_DISTROS is configured in config py |
108 | | - |
109 | | - cachedPackage[distroName] = [distroVersion] |
110 | | - package_data[pkg_key] = cachedPackage |
111 | | - else: |
112 | | - if distroName not in package_data[pkg_key]: |
113 | | - package_data[pkg_key][distroName] = [distroVersion] |
114 | | - package_data[pkg_key]['B'] += cls.DISTRO_BIT_MAP[distroName][distroVersion] |
115 | | - else: |
116 | | - if distroVersion not in package_data[pkg_key][distroName]: |
117 | | - package_data[pkg_key][distroName].append(distroVersion) |
118 | | - package_data[pkg_key]['B'] += cls.DISTRO_BIT_MAP[distroName][distroVersion] |
119 | | - |
120 | | - json_data = list(package_data.values()) |
121 | | - |
122 | | - return json_data |
123 | | - |
124 | 53 | @classmethod |
125 | 54 | def get_instance(cls): |
126 | 55 | LOGGER.debug('get_instance: In get_instance') |
127 | 56 | if not cls.INSTANCE: |
128 | 57 | cls.INSTANCE = PackageSearch() |
129 | 58 | cls.INSTANCE.DISTRO_BIT_MAP = cls.loadSupportedDistros() |
130 | | - cls.INSTANCE.package_data = cls.loadPackageData() |
131 | | - cls.INSTANCE.local_cache = {} |
132 | | - cls.INSTANCE.cache_keys = [] |
133 | 59 | LOGGER.debug('get_instance: Creating singleton instance in get_instance') |
134 | 60 | return cls.INSTANCE |
135 | 61 |
|
@@ -170,50 +96,9 @@ def searchPackages(self, search_term, exact_match, search_bit_flag, page_number |
170 | 96 | LOGGER.debug('searchPackages: search_packages_end_with : %s' % (search_packages_end_with)) |
171 | 97 | LOGGER.debug('searchPackages: search_anywhere_in_packages : %s' % (search_anywhere_in_packages)) |
172 | 98 |
|
173 | | - cache_key = 'ck_%s_%s_%s' % (search_term, exact_match, search_bit_flag) |
174 | | - LOGGER.debug('searchPackages: Cache Key is : %s' % (cache_key)) |
175 | | - |
176 | 99 | search_term = search_term.replace('*', '') |
177 | 100 | search_term_ucase = search_term.upper() |
178 | 101 |
|
179 | | - preliminary_results = {} |
180 | | - if( (cache_key in self.INSTANCE.local_cache) == False ): |
181 | | - LOGGER.debug('searchPackages: Not available in cache, so make fresh search') |
182 | | - LOGGER.debug(self.INSTANCE.package_data) |
183 | | - if (exact_match == True): |
184 | | - LOGGER.debug('searchPackages: Doing exact search') |
185 | | - preliminary_results = [s for s in self.INSTANCE.package_data if s['P'] == search_term and (s['B'] & search_bit_flag) > 0] |
186 | | - elif search_anywhere_in_packages: |
187 | | - LOGGER.debug('searchPackages: Doing Anywhere Search') |
188 | | - preliminary_results = [s for s in self.INSTANCE.package_data if search_term_ucase in s['S'] and (s['B'] & search_bit_flag) > 0] |
189 | | - elif search_packages_begin_with: |
190 | | - LOGGER.debug('searchPackages: Find names that begin with') |
191 | | - preliminary_results = [s for s in self.INSTANCE.package_data if str(s['S']).startswith(search_term_ucase) and (s['B'] & search_bit_flag) > 0] |
192 | | - elif search_packages_end_with: |
193 | | - LOGGER.debug('searchPackages: Find names that end with') |
194 | | - preliminary_results = [s for s in self.INSTANCE.package_data if str(s['S']).endswith(search_term_ucase) and (s['B'] & search_bit_flag) > 0] |
195 | | - |
196 | | - final_results = copy.deepcopy(preliminary_results); #Deep Copy is required since we just need to remove the "S" field from returnable result |
197 | | - for pkg in final_results: |
198 | | - del pkg['S'] |
199 | | - |
200 | | - LOGGER.debug('searchPackages: Search Results Length : %s' % (len(final_results))) |
201 | | - |
202 | | - if(len(final_results) > MAX_RECORDS_TO_SEND): #This is a large result set so add it to cache |
203 | | - LOGGER.debug('searchPackages: Add results to cache') |
204 | | - if(len(list(self.INSTANCE.local_cache.keys())) >= CACHE_SIZE): #CACHE_SIZE is breached so remove oldest cached object |
205 | | - #LOGGER.debug('searchPackages: Cache full. So remove the oldest item. Total of Cached Items: %s' % (len(self.INSTANCE.local_cache.keys())) |
206 | | - self.INSTANCE.local_cache.pop(self.INSTANCE.cache_keys[0],None) #self.INSTANCE.cache_keys[0] has the Oldest Cache Key |
207 | | - self.INSTANCE.cache_keys.remove(self.INSTANCE.cache_keys[0]) #Remoe the cache_key from cache_keys for it is removed from local_cache |
208 | | - |
209 | | - LOGGER.debug('searchPackages: Add new Key to cache_keys for indexing.') |
210 | | - self.INSTANCE.cache_keys.append(cache_key) #append the new key to the list of cache_keys |
211 | | - self.INSTANCE.local_cache[cache_key] = final_results |
212 | | - else: |
213 | | - LOGGER.debug('searchPackages: Getting from cache') |
214 | | - final_results = self.INSTANCE.local_cache[cache_key]; |
215 | | - |
216 | | - LOGGER.debug('searchPackages: Cache Keys: %s' %(json.dumps(self.INSTANCE.cache_keys))) |
217 | 102 | totalLength = len(final_results) |
218 | 103 |
|
219 | 104 | last_page = math.ceil(totalLength/float(MAX_RECORDS_TO_SEND)) |
|
0 commit comments