Neurolingua committed on
Commit
e8db129
1 Parent(s): e8772f1

Update other_function.py

Browse files
Files changed (1) hide show
  1. other_function.py +16 -20
other_function.py CHANGED
@@ -111,10 +111,8 @@ def get_weather(city):
111
  degree=temperature[:-2]
112
  celcius=str(round((int(degree) - 32)* 5/9,1))+temperature[-2]+'C'
113
  return (celcius)
114
- import scrapy
115
- from scrapy.crawler import CrawlerProcess
116
- import pandas as pd
117
- from scrapy.crawler import CrawlerProcess
118
  from scrapy.utils.project import get_project_settings
119
  import scrapy
120
 
@@ -129,22 +127,20 @@ class RateSpider(scrapy.Spider):
129
  commodity = row.xpath('td[1]/text()').get()
130
  price = row.xpath('td[2]/text()').get()
131
  data[commodity] = price
132
- return data
133
 
 
134
  def get_rates():
135
- # Set up a Scrapy process
136
- process = CrawlerProcess(get_project_settings())
137
-
138
- # Set up a dictionary to store the scraped data
139
- data = {}
140
 
141
- # Run the spider
142
- def crawler_finished(signal, sender, item, response, spider):
143
- data.update(item)
144
-
145
- process.signals.connect(crawler_finished, signal=scrapy.signals.item_scraped)
146
- process.crawl(RateSpider)
147
- process.start() # This will block until the crawling is finished
148
-
149
- # Return the scraped data as a string (or format as needed)
150
- return str(data) + ' These prices are for 1 kg'
 
 
111
  degree=temperature[:-2]
112
  celcius=str(round((int(degree) - 32)* 5/9,1))+temperature[-2]+'C'
113
  return (celcius)
114
+ from twisted.internet import reactor, defer
115
+ from scrapy.crawler import CrawlerRunner
 
 
116
  from scrapy.utils.project import get_project_settings
117
  import scrapy
118
 
 
127
  commodity = row.xpath('td[1]/text()').get()
128
  price = row.xpath('td[2]/text()').get()
129
  data[commodity] = price
130
+ yield data
131
 
132
+ @defer.inlineCallbacks
133
  def get_rates():
134
+ data = []
 
 
 
 
135
 
136
+ def process_data(item, response, spider):
137
+ data.append(item)
138
+
139
+ runner = CrawlerRunner(get_project_settings())
140
+ d = runner.crawl(RateSpider)
141
+ d.addBoth(lambda _: reactor.stop())
142
+ reactor.run()
143
+
144
+ # Convert the list of data to a string
145
+ result = ''.join([str(d) for d in data]) + ' These prices are for 1 kg'
146
+ return result