A web scraper built to search for specific information on a given compound (and its pseudonyms)

Merge branch 'develop' of github.com:Recondor/Fourmi into develop

+293 -3
+16
FourmiCrawler/pipelines.py
class RemoveNonePipeline(object):
    """Item pipeline that replaces None values with empty strings.

    Runs before the other pipelines (see ITEM_PIPELINES in settings.py) so
    that downstream pipelines and the feed exporter never see None values.
    """

    def process_item(self, item, spider):
        """Replace every None value in the item by an empty string.

        :param item: The incoming item
        :param spider: The spider which scraped the item (unused)
        :return: The cleaned item
        """
        for key in item:
            if item[key] is None:
                item[key] = ""
        return item
+3 -2
FourmiCrawler/settings.py
··· 11 11 SPIDER_MODULES = ['FourmiCrawler'] 12 12 NEWSPIDER_MODULE = 'FourmiCrawler' 13 13 ITEM_PIPELINES = { 14 - 'FourmiCrawler.pipelines.AttributeSelectionPipeline': 100, 15 - 'FourmiCrawler.pipelines.DuplicatePipeline': 200, 14 + "FourmiCrawler.pipelines.RemoveNonePipeline": 100, 15 + 'FourmiCrawler.pipelines.AttributeSelectionPipeline': 200, 16 + 'FourmiCrawler.pipelines.DuplicatePipeline': 300, 16 17 } 17 18 FEED_URI = 'results.json' 18 19 FEED_FORMAT = 'jsonlines'
+273
FourmiCrawler/sources/NIST.py
··· 1 + from source import Source 2 + from scrapy import log 3 + from scrapy.http import Request 4 + from scrapy.selector import Selector 5 + from FourmiCrawler.items import Result 6 + import re 7 + 8 + # [TODO]: values can be '128.', perhaps remove the dot in that case? 9 + # [TODO]: properties have references and comments which do not exist in the 10 + # Result item, but should be included eventually. 11 + 12 + class NIST(Source): 13 + """NIST Scraper plugin 14 + 15 + This plugin manages searching for a chemical on the NIST website 16 + and parsing the resulting page if the chemical exists on NIST. 17 + """ 18 + website = "http://webbook.nist.gov/*" 19 + 20 + search = 'cgi/cbook.cgi?Name=%s&Units=SI&cTP=on' 21 + 22 + ignore_list = set() 23 + 24 + def __init__(self): 25 + Source.__init__(self) 26 + 27 + def parse(self, response): 28 + sel = Selector(response) 29 + 30 + title = sel.xpath('head/title/text()').extract()[0] 31 + if title == 'Name Not Found': 32 + log.msg('NIST: Chemical not found!', level=log.ERROR) 33 + return 34 + if title not in self.ignore_list: 35 + self.ignore_list.update(title) 36 + log.msg('NIST emit synonym: %s' % title, level=log.DEBUG) 37 + self._spider.get_synonym_requests(title) 38 + 39 + requests = [] 40 + 41 + requests.extend(self.parse_generic_info(sel)) 42 + 43 + symbol_table = {} 44 + tds = sel.xpath('//table[@class="symbol_table"]/tr/td') 45 + for (symbol_td, name_td) in zip(tds[::2], tds[1::2]): 46 + symbol = ''.join(symbol_td.xpath('node()').extract()) 47 + name = name_td.xpath('text()').extract()[0] 48 + symbol_table[symbol] = name 49 + log.msg('NIST symbol: |%s|, name: |%s|' % (symbol, name), 50 + level=log.DEBUG) 51 + 52 + for table in sel.xpath('//table[@class="data"]'): 53 + summary = table.xpath('@summary').extract()[0] 54 + if summary == 'One dimensional data': 55 + log.msg('NIST table: Aggregrate data', level=log.DEBUG) 56 + requests.extend( 57 + self.parse_aggregate_data(table, symbol_table)) 58 + elif 
table.xpath('tr/th="Initial Phase"').extract()[0] == '1': 59 + log.msg('NIST table; Enthalpy/entropy of phase transition', 60 + level=log.DEBUG) 61 + requests.extend(self.parse_transition_data(table, summary)) 62 + elif table.xpath('tr[1]/td'): 63 + log.msg('NIST table: Horizontal table', level=log.DEBUG) 64 + elif summary == 'Antoine Equation Parameters': 65 + log.msg('NIST table: Antoine Equation Parameters', 66 + level=log.DEBUG) 67 + requests.extend(self.parse_antoine_data(table, summary)) 68 + elif len(table.xpath('tr[1]/th')) == 5: 69 + log.msg('NIST table: generic 5 columns', level=log.DEBUG) 70 + # Symbol (unit) Temperature (K) Method Reference Comment 71 + requests.extend(self.parse_generic_data(table, summary)) 72 + elif len(table.xpath('tr[1]/th')) == 4: 73 + log.msg('NIST table: generic 4 columns', level=log.DEBUG) 74 + # Symbol (unit) Temperature (K) Reference Comment 75 + requests.extend(self.parse_generic_data(table, summary)) 76 + else: 77 + log.msg('NIST table: NOT SUPPORTED', level=log.WARNING) 78 + continue #Assume unsupported 79 + return requests 80 + 81 + def parse_generic_info(self, sel): 82 + """Parses: synonyms, chemical formula, molecular weight, InChI, 83 + InChiKey, CAS number 84 + """ 85 + ul = sel.xpath('body/ul[li/strong="IUPAC Standard InChI:"]') 86 + li = ul.xpath('li') 87 + 88 + raw_synonyms = ul.xpath('li[strong="Other names:"]/text()').extract() 89 + for synonym in raw_synonyms[0].strip().split(';\n'): 90 + log.msg('NIST synonym: %s' % synonym, level=log.DEBUG) 91 + self.ignore_list.update(synonym) 92 + self._spider.get_synonym_requests(synonym) 93 + 94 + data = {} 95 + 96 + raw_formula = ul.xpath('li[strong/a="Formula"]//text()').extract() 97 + data['Chemical formula'] = ''.join(raw_formula[2:]).strip() 98 + 99 + raw_mol_weight = ul.xpath('li[strong/a="Molecular weight"]/text()') 100 + data['Molecular weight'] = raw_mol_weight.extract()[0].strip() 101 + 102 + raw_inchi = ul.xpath('li[strong="IUPAC Standard InChI:"]//tt/text()') 
103 + data['IUPAC Standard InChI'] = raw_inchi.extract()[0] 104 + 105 + raw_inchikey = ul.xpath('li[strong="IUPAC Standard InChIKey:"]' 106 + '/tt/text()') 107 + data['IUPAC Standard InChIKey'] = raw_inchikey.extract()[0] 108 + 109 + raw_cas_number = ul.xpath('li[strong="CAS Registry Number:"]/text()') 110 + data['CAS Registry Number'] = raw_cas_number.extract()[0].strip() 111 + 112 + requests = [] 113 + for key, value in data.iteritems(): 114 + result = Result({ 115 + 'attribute': key, 116 + 'value': value, 117 + 'source': 'NIST', 118 + 'reliability': 'Unknown', 119 + 'conditions': '' 120 + }) 121 + requests.append(result) 122 + 123 + return requests 124 + 125 + def parse_aggregate_data(self, table, symbol_table): 126 + """Parses the table(s) which contain possible links to individual 127 + data points 128 + """ 129 + results = [] 130 + for tr in table.xpath('tr[td]'): 131 + extra_data_url = tr.xpath('td[last()][a="Individual data points"]' 132 + '/a/@href').extract() 133 + if extra_data_url: 134 + request = Request(url=self.website[:-1] + extra_data_url[0], 135 + callback=self.parse_individual_datapoints) 136 + results.append(request) 137 + continue 138 + data = [] 139 + for td in tr.xpath('td'): 140 + data.append(''.join(td.xpath('node()').extract())) 141 + 142 + name = symbol_table[data[0]] 143 + condition = '' 144 + 145 + m = re.match(r'(.*) at (.*)', name) 146 + if m: 147 + name = m.group(1) 148 + condition = m.group(2) 149 + 150 + result = Result({ 151 + 'attribute': name, 152 + 'value': data[1] + ' ' + data[2], 153 + 'source': 'NIST', 154 + 'reliability': 'Unknown', 155 + 'conditions': condition 156 + }) 157 + log.msg('NIST: |%s|' % data, level=log.DEBUG) 158 + results.append(result) 159 + return results 160 + 161 + @staticmethod 162 + def parse_transition_data(table, summary): 163 + """Parses the table containing properties regarding phase changes""" 164 + results = [] 165 + 166 + tr_unit = ''.join(table.xpath('tr[1]/th[1]/node()').extract()) 167 + m = 
re.search(r'\((.*)\)', tr_unit) 168 + unit = '!' 169 + if m: 170 + unit = m.group(1) 171 + 172 + for tr in table.xpath('tr[td]'): 173 + tds = tr.xpath('td/text()').extract() 174 + result = Result({ 175 + 'attribute': summary, 176 + 'value': tds[0] + ' ' + unit, 177 + 'source': 'NIST', 178 + 'reliability': 'Unknown', 179 + 'conditions': '%s K, (%s -> %s)' % (tds[1], tds[2], tds[3]) 180 + }) 181 + results.append(result) 182 + 183 + 184 + return results 185 + 186 + @staticmethod 187 + def parse_generic_data(table, summary): 188 + """Parses the common tables of 4 and 5 rows. Assumes they are of the 189 + form: 190 + Symbol (unit)|Temperature (K)|Method|Reference|Comment 191 + Symbol (unit)|Temperature (K)|Reference|Comment 192 + """ 193 + results = [] 194 + 195 + tr_unit = ''.join(table.xpath('tr[1]/th[1]/node()').extract()) 196 + m = re.search(r'\((.*)\)', tr_unit) 197 + unit = '!' 198 + if m: 199 + unit = m.group(1) 200 + 201 + for tr in table.xpath('tr[td]'): 202 + tds = tr.xpath('td/text()').extract() 203 + result = Result({ 204 + 'attribute': summary, 205 + 'value': tds[0] + ' ' + unit, 206 + 'source': 'NIST', 207 + 'reliability': 'Unknown', 208 + 'conditions': '%s K' % tds[1] 209 + }) 210 + results.append(result) 211 + return results 212 + 213 + @staticmethod 214 + def parse_antoine_data(table, summary): 215 + """Parse table containing parameters for the Antione equation""" 216 + results = [] 217 + 218 + for tr in table.xpath('tr[td]'): 219 + tds = tr.xpath('td/text()').extract() 220 + result = Result({ 221 + 'attribute': summary, 222 + 'value': 'A=%s, B=%s, C=%s' % (tds[1], tds[2], tds[3]), 223 + 'source': 'NIST', 224 + 'reliability': 'Unknown', 225 + 'conditions': '%s K' % tds[0] 226 + }) 227 + results.append(result) 228 + 229 + return results 230 + 231 + def parse_individual_datapoints(self, response): 232 + """Parses the page linked from aggregate data""" 233 + sel = Selector(response) 234 + table = sel.xpath('//table[@class="data"]')[0] 235 + 236 + results = 
[] 237 + 238 + name = table.xpath('@summary').extract()[0] 239 + condition = '' 240 + m = re.match(r'(.*) at (.*)', name) 241 + if m: 242 + name = m.group(1) 243 + condition = m.group(2) 244 + 245 + tr_unit = ''.join(table.xpath('tr[1]/th[1]/node()').extract()) 246 + m = re.search(r'\((.*)\)', tr_unit) 247 + unit = '!' 248 + if m: 249 + unit = m.group(1) 250 + 251 + for tr in table.xpath('tr[td]'): 252 + tds = tr.xpath('td/text()').extract() 253 + uncertainty = '' 254 + m = re.search('Uncertainty assigned by TRC = (.*?) ', tds[-1]) 255 + if m: 256 + uncertainty = '+- %s ' % m.group(1) 257 + # [TODO]: get the plusminus sign working in here 258 + result = Result({ 259 + 'attribute': name, 260 + 'value': '%s %s%s' % (tds[0], uncertainty, unit), 261 + 'source': 'NIST', 262 + 'reliability': 'Unknown', 263 + 'conditions': condition 264 + }) 265 + results.append(result) 266 + 267 + return results 268 + 269 + def new_compound_request(self, compound): 270 + if compound not in self.ignore_list: 271 + self.ignore_list.update(compound) 272 + return Request(url=self.website[:-1] + self.search % compound, 273 + callback=self.parse)
+1 -1
fourmi.py
··· 102 102 103 103 # The start for the Fourmi Command Line interface. 104 104 if __name__ == '__main__': 105 - arguments = docopt.docopt(__doc__, version='Fourmi - V0.3.0') 105 + arguments = docopt.docopt(__doc__, version='Fourmi - V0.4.0') 106 106 loader = SourceLoader() 107 107 108 108 if arguments["--include"]: