index.py 20 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495
  1. #!/usr/bin/python3
  2. import mysql.connector
  3. import requests
  4. from bs4 import BeautifulSoup
  5. import urllib.parse
  6. import re
  7. from sys import exit as exit
  8. import json
  9. import datetime
  10. import custom_email
  11. from tabulate import tabulate
  12. from configparser import ConfigParser
  13. from os import path
  14. import logging
  15. ### TO DO ###
  16. #
  17. # email results
  18. # allow this script to be called and work by itself (if __name__ == __main__)
  19. # Print useful reports (land only, house and land, etc)
  20. # Check if db entries no longer appear online (mark expired)
  21. # When checking online from various sites, check if address already exists in db
  22. # - if so, warn user and do not add
  23. # Add date_added to initial entries
  24. # Check results against database for changes
  25. # - update and add/change date_modified
  26. # Add argument to run update query when results.py is called
  27. # Add database column to hold parcel number. Make links to GIS servers
  28. #
  29. # IDENTIFY NEW PROPERTIES!!
  30. #
  31. # Automate db opening and closing when calling dbinsert()
  32. #
  33. #############
  34. class Property:
  35. """Description of a proerty"""
  36. def __init__ (self, site_name, type, MLS, address, city, st, zip, \
  37. county, price, acres, title='', sqft=0, bedrooms=0, baths=0, description='', link=''):
  38. self.site_name = site_name
  39. self.type = type
  40. self.MLS = MLS
  41. self.title = title
  42. self.sqft = sqft
  43. self.bedrooms = bedrooms
  44. self.baths = baths
  45. self.address = address
  46. self.city = city
  47. self.st = st
  48. self.zip = zip
  49. self.county = county
  50. self.price = price
  51. self.acres = acres
  52. self.description = description
  53. self.link = link
  54. class Search:
  55. """Universal Search Criteria"""
  56. def checktype(self, attribute):
  57. if not attribute == 'None':
  58. return attribute
  59. else:
  60. return ''
  61. # def __init__(self, county: list, lower_price=0, upper_price=500000, \
  62. # lower_acres=5, upper_acres=15, type=['farm','land','home'], lower_sqft='', upper_sqft='', \
  63. # lower_bedrooms='', upper_bedrooms=''):
  64. def __init__(self, file = 'landsearch.conf'):
  65. self.file = file
  66. if not path.exists(self.file):
  67. raise FileNotFoundError("The config file cannot be opened", self.file)
  68. try:
  69. config = ConfigParser()
  70. config.read(self.file)
  71. search_params = config['Search']
  72. log_params = config['Logging']
  73. except FileNotFoundError as err:
  74. print(err, "Using default search parameters.")
  75. except Exception as err:
  76. print(err, "Using default search parameters.")
  77. logging.basicConfig(filename=log_params.get('log_file'), \
  78. level=log_params.get('logging_level', 30), \
  79. format='%(asctime)s %(levelname)-8s %(message)s', \
  80. datefmt='%Y-%m-%d %H:%M:%S') ## Default log level WARNING (30)
  81. logging.getLogger("urllib3").setLevel(logging.WARNING) ## Supress Requests method logging
  82. logging.debug("Log level set to %s", logging.root.level)
  83. county = search_params.get('county', ['Gwinnett', 'Hall', 'Jackson', 'Walton', 'Barrow'])
  84. if isinstance(county, str):
  85. county = county.split(", ")
  86. type = search_params.get('type', ['farm', 'house', 'land'])
  87. if isinstance(type, str):
  88. type = type.split(", ")
  89. self.types=['land', 'farm', 'home', 'house']
  90. self.county = county
  91. self.lower_price = self.checktype(search_params.get('lower_price', 0))
  92. self.upper_price = self.checktype(search_params.get('upper_price', 525000))
  93. self.lower_acres = self.checktype(search_params.get('lower_acres', 5))
  94. self.upper_acres = self.checktype(search_params.get('upper_acres', 15))
  95. self.type = type ##accept list!
  96. self.lower_sqft = self.checktype(search_params.get('lower_sqft', ''))
  97. self.upper_sqft = self.checktype(search_params.get('upper_sqft', ''))
  98. self.lower_bedrooms = self.checktype(search_params.get('lower_bedrooms', ''))
  99. self.upper_bedrooms = self.checktype(search_params.get('upper_bedrooms', ''))
  100. # self.lower_price = search_params.get('lower_price', 0)
  101. # self.upper_price = search_params.get('upper_price', 525000)
  102. # self.lower_acres = search_params.get('lower_acres', 5)
  103. # self.upper_acres = search_params.get('upper_acres', 15)
  104. # self.lower_sqft = search_params.get('lower_sqft', '')
  105. # self.upper_sqft = search_params.get('upper_sqft', '')
  106. # self.lower_bedrooms = search_params.get('lower_bedrooms', '')
  107. # self.upper_bedrooms = search_params.get('upper_bedrooms', '')
  108. for property_type in self.type:
  109. assert property_type in self.types, ("Unknown type '" + property_type + "'. Property Type must be of type: " + str(self.types))
  110. ## FOR TESTING, PRINT ALL ATTRIBUTES OF SEARCH ##
  111. logging.debug(vars(self))
  112. class ImproperSearchError(Exception):
  113. def __init__ (self, search, message="Improper Search. Must use instance of Search class"):
  114. self.search = search
  115. self.message = message
  116. super().__init__(self.message)
class MLSDATA:
    """Fetches and stores MLS Data
    Currently only supports GeorgiaMLS.com (GMLS)"""
    # Counties this scraper knows how to query (see getmlsdata()).
    counties=['Gwinnett', 'Barrow', 'Hall', 'Jackson', 'Walton']
    # NOTE(review): live API key committed to source control -- move to
    # config/environment and revoke the exposed key.
    GoogleAPIKey = 'AIzaSyAXAnpBtjv760W8YIPqKZ0dFXpwAaZN7Es'
    # When True, getGoogle() is called for every newly inserted listing
    # (a paid API call per listing).
    live_google = True

    def __init__ (self, mlstype):
        self.help = "This is a class that will retrieve MLS data from various sources, store the info in a database, and run queries on the data."
        self.mlstype = mlstype.lower() ## Determines what kind of data is to be retrieved (gmls, Zillow, etc)
        self.cursor = ''  # set by connectdb(); the falsy '' doubles as a "not connected" flag
        self.cnx = ''     # MySQL connection object once connectdb() runs
        self.new_listings = []  # Property objects inserted during this run (consumed by email())

    def stringbuilder(self, search: Search, county):
        """ Takes Search class and build appropriate URL query based on mlstype. Currently only supports gmls.

        Returns the full search URL string; returns None (implicitly) when
        self.mlstype is not 'gmls', since no other site is implemented.
        """
        if self.mlstype == 'gmls':
            base_addr = 'https://www.georgiamls.com/real-estate/search-action.cfm?'
            # Query parameters the GMLS search form expects; 'orderBy',
            # 'scat' and 'sdsp' are fixed form values.
            params = [('cnty', county), \
                ('lpl', search.lower_price), ('lph', search.upper_price), \
                ('acresL', search.lower_acres), ('acresH', search.upper_acres), \
                ('sqftl', search.lower_sqft), ('sqfth', search.upper_sqft), \
                ('orderBy', 'b'), \
                ('scat', '1'), \
                ('sdsp', 'g')]
            # Map generic search types onto GMLS property-type codes
            # (ll = land lot, af = acreage/farm, sd = single dwelling).
            # NOTE(review): 'home' and 'house' both map to 'sd', so listing
            # both in the config duplicates the 'typ' parameter.
            for type in search.type:
                if 'land' in type.lower():
                    params.append(('typ', 'll'))
                if 'farm' in type.lower():
                    params.append(('typ', 'af'))
                if 'home' in type.lower():
                    params.append(('typ', 'sd'))
                if 'house' in type.lower():
                    params.append(('typ', 'sd'))
            search_string = base_addr + urllib.parse.urlencode(params)
            print(search_string)
            return search_string

    def break_address(self, address):
        """Takes an address string in the form 'street address|city, state zip' and returns a list
        [street, city, st, zip]."""
        street = address[:address.find('|')]
        csz = address[address.find('|')+1:]
        city = csz[:csz.find(',')]
        # After the comma the text is " ST ZIP": split(' ') gives
        # ['', 'ST', 'ZIP'], hence indices [1] and [2].
        st = csz[csz.find(',')+1:].split(' ')[1]
        zip = csz[csz.find(',')+1:].split(' ')[2]
        split_address = [street, city, st, zip]
        return split_address

    def gmlsparser(self, URL, county, pages=''):
        """ Retrieve the website for georgiamls.com and returns a list of Property objects.
        UNIQUE TO GEORGIAMLS.COM ONLY!!

        Returns None (implicitly) when the results page has no pagination
        block, i.e. the search produced no listings.
        """
        properties_list = []
        r = requests.get(URL)
        soup = BeautifulSoup(r.content, 'html5lib')
        if pages == '':
            # Pagination text ends "... page X of Y": Y is [-1], X is [-3].
            try:
                pages = soup.find("div", {'class':'small listing-pagination-count'}).getText().strip().split(" ")[-1]
                current_page = soup.find("div", {'class':'small listing-pagination-count'}).getText().strip().split(" ")[-3]
            except AttributeError as err:
                # .find() returned None -> no pagination div -> no results.
                print("No Results Found.")
                return
        else:
            print('pages already set to: ' + str(pages))
        for page in range(0, int(pages)):
            print('Processing Page: ' + str(page + 1) + ' of ' + str(pages))
            if not page == 0:
                # GMLS paginates 12 listings per page via the &start= offset.
                next_URL = URL + '&start=' + str(((12*page)+1))
                soup = BeautifulSoup(requests.get(next_URL).content, 'html5lib')
            raw_listings = soup.findAll("div", {'class':'col-xs-12 col-sm-6 col-lg-4 text-center listing-gallery'})
            for listing in raw_listings:
                items = listing.findAll("p")
                site_name = self.mlstype
                MLS = " ".join(items[3].getText().strip()[6:15].split()) ## MLS NUMBER
                title = '' ## Listing Title (address if no title)
                price = items[0].string.strip() ## Price
                if self.mlstype == 'gmls':
                    # Fetch the listing's detail page for the remaining fields.
                    link = 'https://www.georgiamls.com' + listing.a['href']
                    detail_request = requests.get(link)
                    detail_soup = BeautifulSoup(detail_request.content, 'html5lib')
                    details = detail_soup.findAll('tr')
                    # Beds/baths cell is slash-separated; [:-3] strips a
                    # trailing 3-character label from each half.
                    bedbath = details[1].findAll('td')[1].getText().strip().split('/')
                    br = bedbath[0][:-3]
                    ba = bedbath[1][:-3]
                    baths = ba ## IF House is present
                    bedrooms = br ## IF House is present
                    address = ''
                    # Walk the detail-table rows and pick out known fields.
                    # NOTE(review): sqft/acres/type are only bound when their
                    # rows exist; a detail page missing one would raise
                    # NameError at the Property() call below -- confirm GMLS
                    # always renders these rows.
                    for element in details:
                        if 'sqft' in element.getText():
                            sqft = element.findAll('td')[1].getText().strip()[:-5].replace(',','')
                        if 'lot size' in element.getText().lower():
                            acres = element.findAll('td')[1].getText().strip()[:-6]
                        if 'Property Type' in element.getText():
                            ptype = element.findAll('td')[1].getText().strip()
                            if 'acreage' in ptype.lower():
                                type = 'af'
                            elif 'land lot' in ptype.lower():
                                type = 'll'
                            elif 'single family home' in ptype.lower():
                                type = 'sf'
                            else:
                                type = 'unknown'
                        if 'Address' in element.getText():
                            if not address: # Prevents finding the word 'address' elsewhere in the listings
                                address = element.findAll('td')[1]
                    # The address <td> holds street and "city, st zip" as
                    # separate child nodes (indices 0 and 2).
                    street_address = list(address)[0].strip()
                    csz = list(address)[2].strip()
                    split_address = self.break_address(street_address + '|' + csz)
                    description = detail_soup.find('div', {'id':'listing-remarks'}).getText().strip().replace('\t','')
                    data = Property(site_name = self.mlstype, \
                        type = type, \
                        MLS = MLS, \
                        bedrooms = bedrooms, \
                        baths = baths, \
                        sqft = sqft, \
                        address = split_address[0], \
                        city = split_address[1].title(), \
                        st = split_address[2].upper(), \
                        zip = split_address[3], \
                        county = county.title(), \
                        price = price.replace('$','').replace(',',''), \
                        acres = acres, \
                        description = description, \
                        link = link)
                    properties_list.append(data)
                    print('Scanned: ' + data.address)
        return properties_list

    def getmlsdata(self, search: Search, county):
        """This is the main entrypoint. Takes arguments to pass to stringbuilder to create the URL.
        Selects appropriate parser based on self.mlstype from class instance.
        Needs any modifications from the standard search ($0 to $500,000, 5 to 15 acres, etc)
        See class Search for more information.
        --> 9/1/20 - takes Search class as argument. All properties are handled by the class <--

        Returns a list of Property objects, or None when the county is not
        recognized or no parser matches self.mlstype.  Raises
        ImproperSearchError when *search* is not a Search instance.
        """
        if isinstance(search, Search):
            ##
            # PROGRAM BREAKS HERE - Used to loop for each county, now Search class contains list of counties. Need to automate looping.
            ##
            if not county in self.counties: ### FIX for lower()
                print("County " + county + " not regognized. Exiting")
            else:
                print("Scanning for results in " + county + " using the " + self.mlstype.upper() + " database.")
                if self.mlstype == 'gmls':
                    list = self.gmlsparser(self.stringbuilder(search, county), county)
                    return list
        else:
            raise ImproperSearchError(search)

    def checkdb(self, criteria_dict):
        """Check dictionary of critera against database.
        Currently accepts keys: MLS, title, address (street number/name, not city/state/zip).
        Returns the matching row count (truthy when a record exists);
        returns None when the database is not connected."""
        if self.cursor: ## Check if DB is connected
            for criteria in criteria_dict:
                ## Determine criteria passed, and execute queries for each
                if criteria == 'MLS':
                    self.cursor.execute("SELECT COUNT(*) FROM properties WHERE MLS = %(MLS)s GROUP BY id", {criteria:criteria_dict[criteria]})
                    if self.cursor.rowcount > 0: return self.cursor.rowcount # stop for loop if match already found.
                elif criteria == 'title':
                    self.cursor.execute("SELECT COUNT(*) FROM properties WHERE title = %(title)s GROUP BY id", {criteria:criteria_dict[criteria]})
                    if self.cursor.rowcount > 0: return self.cursor.rowcount # stop for loop if match already found.
                elif criteria == 'address':
                    self.cursor.execute("SELECT COUNT(*) FROM properties WHERE address = %(address)s GROUP BY id", {criteria:criteria_dict[criteria]})
                    if self.cursor.rowcount > 0: return self.cursor.rowcount # stop for loop if match already found.
                else:
                    print("Cannot search on parameter: " + criteria)
            return self.cursor.rowcount
        else:
            print("Database is not connected or cursor not filled. Use function 'connectdb()' to establish")

    def getGoogle(self, property):
        """Supplies data from Google Distance Matrix API to populate
        distance_to_work, time_to_work, distance_to_school, time_to_school
        on *property* (attributes are set in place).
        Costs money, so it should only be called when inserting a new db record.
        Distances are in METERS (1m = 0.000621371 mi) and times in SECONDS.
        When the API status is not 'OK' the attributes are simply not set
        (email() guards with hasattr for this case)."""
        print("Fetching live Google Data. $$")
        # Build Request
        # NOTE(review): destination1/destination2 are unused -- the
        # destinations string is duplicated inline below.
        destination1 = 'Hebron Christian Acadamy' ## Working query for Hebron Christian Acadamy
        destination2 = 'JHRJ+FJ Atlanta, Georgia' ## Plus code for Hourly parking at Int'l Terminal, KATL
        params = {}
        params['units'] = 'imperial'
        params['origins'] = property.address + ', ' + property.city + ' ' + property.st
        # Pipe-separated destinations: element 0 = school, element 1 = work
        # (matches the rows[0]['elements'][i] indexing below).
        params['destinations'] = 'Hebron Christian Acadamy|JHRJ+FJ Atlanta, Georgia'
        params['key'] = self.GoogleAPIKey
        baseURL = 'https://maps.googleapis.com/maps/api/distancematrix/json?'
        API_URL = baseURL + urllib.parse.urlencode(params)
        # Send Request and capture result as json
        try:
            google_result = requests.get(API_URL).json()
            if google_result['status'] == 'OK':
                property.distance_to_school = google_result['rows'][0]['elements'][0]['distance']['value']
                property.time_to_school = google_result['rows'][0]['elements'][0]['duration']['value']
                property.distance_to_work = google_result['rows'][0]['elements'][1]['distance']['value']
                property.time_to_work = google_result['rows'][0]['elements'][1]['duration']['value']
        except:
            # NOTE(review): bare except hides the real failure; narrow to
            # (requests.RequestException, KeyError, IndexError) and log.
            print("ERROR: Failed to obtain Google API data")

    def insertrecord(self, property, work_address=None, school_address=None):
        """Inserts record into database. Takes argument Property class object.
        FUTURE - add date_added field to insert operation.
        NOTE(review): work_address/school_address parameters are currently unused."""
        if self.cursor:
            # Mutates property.__dict__ in place: adds a Date_Added key.
            criteria_dict = property.__dict__
            criteria_dict['Date_Added'] = str(datetime.date.today())
            placeholder_columns = ", ".join(criteria_dict.keys())
            # NOTE(review): this computed placeholder list is dead code --
            # the format() call below rebinds the same keyword to
            # tuple(values), interpolating raw values into the SQL string.
            placeholder_values = ", ".join([":{0}".format(col) for col in criteria_dict.keys()])
            # NOTE(review): string-built SQL -- values containing quotes
            # will break the statement and this is injectable; should use
            # parameterized cursor.execute(qry, params) instead.
            qry = "INSERT INTO properties ({placeholder_columns}) VALUES {placeholder_values}".format(placeholder_columns=placeholder_columns, placeholder_values=tuple(criteria_dict.values()))
            self.cursor.execute(qry)
            self.cnx.commit()
            print("Inserted " + criteria_dict['MLS'] + " | " + criteria_dict['address'] + " into database.")
        else:
            print("Database is not connected or cursor not filled. Use function 'connectdb()' to establish")

    def connectdb(self, host='192.168.100.26', user='landsearchuser', password='1234', database='landsearch'):
        """Connects to database and returns a cursor object.
        NOTE(review): default credentials are hardcoded in source -- move
        them to the config file."""
        self.cnx = mysql.connector.connect(host=host, user=user, password=password, database=database, buffered=True)
        self.cursor = self.cnx.cursor()
        return self.cursor

    def closedb(self):
        """Cleanly close the db."""
        self.cursor.close()
        self.cnx.close()

    def dbinsert(self, properties: list):
        """Inserts records into database. Takes list of Property class objects.
        Skips records whose MLS number or address already exists; each new
        record is appended to self.new_listings for later reporting."""
        if not properties == None:
            if not isinstance(properties, list):
                raise TypeError('type list required')
            for property in properties:
                if not self.checkdb({'MLS': property.MLS, 'address': property.address}):
                    if self.live_google: self.getGoogle(property) ## <- This will populate distance and time fields if set TRUE
                    self.insertrecord(property)
                    self.new_listings.append(property)
                else:
                    print(property.MLS + ' | ' + property.address + ' is already in db. Not inserted.')
        else:
            print("Empty dataset. No records to insert.")

    def alerts(self):
        """Placeholder for future alerting logic (not implemented)."""
        pass

    def email(self):
        """Build and send a plain-text report of self.new_listings.

        NOTE(review): the `body` string built in the loop is discarded when
        `body` is reassigned to the header template below -- presumably the
        tabulate() output superseded the hand-built lines; confirm."""
        body = ''
        data = []
        subj = "New Real Estate Listings for " + str(datetime.date.today())
        for listing in self.new_listings:
            row = []
            body += listing.MLS + " | " + listing.address + " | " + listing.acres + " | " + listing.price + " | " + listing.link + "\n"
            row.append(listing.MLS)
            row.append(listing.address)
            row.append('{:0,.2f}'.format(float(listing.acres)))
            row.append(listing.sqft)
            row.append('${:0,.0f}'.format(int(listing.price)))
            # time_to_school only exists when getGoogle() succeeded.
            row.append(listing.time_to_school/60 if hasattr(listing, 'time_to_school') else 'NA')
            row.append(listing.link)
            data.append(row)
        body = """\
Daily Real Estate Search Report\n
The following properties have been found which may be of interest.\n
"""
        results = tabulate(data, headers=['MLS', 'Address', 'Acres', 'sqft', 'Price', 'Time to School', 'link'])
        body += results
        sendto = ['stagl.mike@gmail.com', 'M_Stagl@hotmail.com']
        mymail = custom_email.simplemail(subj, body, sendto)
        if len(self.new_listings) > 0:
            try:
                mymail.sendmail()
            except Exception as e:
                # NOTE(review): string + exception object raises TypeError;
                # should be str(e). Left as-is in this comments-only pass.
                print("Error sending email. " + e)
        else:
            print("No new listings. Email not sent")
            # REMOVE AFTER TESTING # -- NOTE(review): this forces a send
            # even when there are no new listings; remove for production.
            mymail.sendmail()
            ########################
  391. ########### BEGIN CODE ###############33
  392. if __name__ == '__main__':
  393. gmls = MLSDATA('GMLS') # Create MLSDATA object
  394. mysearch = Search() # Create a custom search object
  395. # print(len(mysearch.county))
  396. # print(mysearch.county[0])
  397. myresults = []
  398. ## Create function in MLSDATA module:
  399. # - takes counties from configparser and calls getmlsdata for each county.
  400. # - Compiles results into single list and returns that list
  401. # - User code would look something like this:
  402. # _ mysearch = Search()
  403. # _ mydata = gmls.findalllistings(mysearch) # This would control the looping of counties and return a list like normal
  404. # _ gmls.dbinsert(myresults) # This would automate db opening and closing
  405. for county in mysearch.county:
  406. print("local search: ", county)
  407. mysearch = Search() ## Search used to take county as parameter, so this loop would work. Now Search class contains list. loop must occur in getmlsdata module
  408. mydata = gmls.getmlsdata(mysearch, county)
  409. for listing in mydata:
  410. myresults.append(listing)
  411. # print(len(myresults))
  412. # print(myresults[0].address)
  413. gmls.connectdb()
  414. gmls.dbinsert(myresults)
  415. gmls.closedb()
  416. #
  417. # gmls.email()
  418. #
  419. #print()
  420. #print(str(len(gmls.new_listings)) + " new properties found!")
  421. #print()
  422. #for listing in gmls.new_listings:
  423. # print(listing.MLS, listing.address)
  424. # gmls = MLSDATA('GMLS')
  425. #
  426. # #new_properties = []
  427. #
  428. ## for county in ['Jackson']: ### FIX
  429. # for county in gmls.counties: ### FIX
  430. # mysearch = Search(county, type=['farm', 'house', 'land'], upper_price=525000) ### FIX
  431. # mydata = gmls.getmlsdata(mysearch)
  432. #
  433. # gmls.connectdb()
  434. # gmls.dbinsert(mydata)
  435. # gmls.closedb()
  436. #
  437. # gmls.email()
  438. #
  439. #print()
  440. #print(str(len(gmls.new_listings)) + " new properties found!")
  441. #print()
  442. #for listing in gmls.new_listings:
  443. # print(listing.MLS, listing.address)
  444. #
  445. #