
Commit

chore: Lint
jpmckinney committed Oct 5, 2023
1 parent 791b327 commit a90fdf0
Showing 8 changed files with 19 additions and 22 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -11,6 +11,6 @@ repos:
       - id: flake8
         additional_dependencies: [flake8-comprehensions]
   - repo: https://github.com/pycqa/isort
-    rev: 5.11.4
+    rev: 5.12.0
     hooks:
       - id: isort
2 changes: 1 addition & 1 deletion ca_ab/people.py
@@ -1,6 +1,6 @@
 import csv
-from itertools import zip_longest
 from io import StringIO
+from itertools import zip_longest
 
 from utils import CanadianPerson as Person
 from utils import CanadianScraper
2 changes: 1 addition & 1 deletion ca_bc_municipalities_candidates/people.py
@@ -82,7 +82,7 @@ def scrape(self):
         birth_date = 1900
         seen = set()
 
-        rows = [row for row in reader]
+        rows = list(reader)
         assert len(rows), "No councillors found"
         for row in rows:
             name = row["full name"]
7 changes: 3 additions & 4 deletions ca_on/people.py
@@ -30,10 +30,9 @@ def scrape(self):
                 '//div[@class="views-element-container block block-views block-views-blockmember-member-headshot"]//img/@src'
             )
 
-            district = ''.join(
+            district = "".join(
                 node.xpath(
-                    '//div[@block="block-views-block-member-member-riding-block"]'
-                    '//p[@class="riding"]//a//text()'
+                    '//div[@block="block-views-block-member-member-riding-block"]' '//p[@class="riding"]//a//text()'
                 )
             ).strip()
             nodes = node.xpath('//div[@id="main-content"]//a')
@@ -66,7 +65,7 @@ def scrape(self):
                     '../following-sibling::div[@class="views-field views-field-nothing"]'
                     '//span[@class="field-content"]'
                     '//strong[contains(text(),"Tel.")]'
-                    '/following-sibling::text()[1]'
+                    "/following-sibling::text()[1]"
                 )[0],
                 error=False,
             )
3 changes: 2 additions & 1 deletion ca_on_toronto/__init__.py
@@ -1,7 +1,8 @@
-from utils import CanadianJurisdiction
 from opencivicdata.divisions import Division
 from pupa.scrape import Organization
+
+from utils import CanadianJurisdiction
 
 
 class Toronto(CanadianJurisdiction):
     classification = "legislature"
12 changes: 6 additions & 6 deletions ca_on_toronto/people.py
@@ -3,12 +3,12 @@
 
 class TorontoPersonScraper(CSVScraper):
     # https://open.toronto.ca/dataset/elected-officials-contact-information/
-    csv_url = 'https://ckan0.cf.opendata.inter.prod-toronto.ca/dataset/27aa4651-4548-4e57-bf00-53a346931251/resource/dea217a2-f7c1-4e62-aec1-48fffaad1170/download/2022-2026%20Elected%20Officials%20Contact%20Info.csv'
+    csv_url = "https://ckan0.cf.opendata.inter.prod-toronto.ca/dataset/27aa4651-4548-4e57-bf00-53a346931251/resource/dea217a2-f7c1-4e62-aec1-48fffaad1170/download/2022-2026%20Elected%20Officials%20Contact%20Info.csv"
     corrections = {
-        'district name': {
-            'Scarborough East': 'Scarborough-Guildwood',
+        "district name": {
+            "Scarborough East": "Scarborough-Guildwood",
         },
-        'email': {
-            'councillor_ [email protected]': '[email protected]',
-        }
+        "email": {
+            "councillor_ [email protected]": "[email protected]",
+        },
     }
2 changes: 1 addition & 1 deletion ca_qc_trois_rivieres/people.py
@@ -21,7 +21,7 @@ def scrape(self):
             email = self.lxmlize(url).xpath('//div[@class="content-page"]//a[starts-with(@href, "mailto:")]/@href')[0]
 
             email = re.sub("^mailto:", "", email)
-            name, district = map(lambda x: x.strip(), member.xpath(".//figcaption//text()"))
+            name, district = [x.strip() for x in member.xpath(".//figcaption//text()")]
             district = re.sub(r"\A(?:de|des|du) ", lambda match: match.group(0).lower(), district, flags=re.I)
             role = "Conseiller"
 
11 changes: 4 additions & 7 deletions ca_sk/people.py
@@ -16,18 +16,15 @@ def scrape(self):
             district = member.xpath("./td")[2].text_content()
             url = member.xpath("./td[1]/a/@href")[0]
             page = self.lxmlize(url)
-            party = page.xpath(
-                '//span[@id="ContentContainer_MainContent_ContentBottom_Property4"]'
-                '/span'
-            )[0].text
+            party = page.xpath('//span[@id="ContentContainer_MainContent_ContentBottom_Property4"]' "/span")[
+                0
+            ].text
 
             p = Person(primary_org="legislature", name=name, district=district, role="MLA", party=party)
             p.add_source(COUNCIL_PAGE)
             p.add_source(url)
             try:
-                p.image = page.xpath(
-                    '//div[contains(@class, "mla-image-cell")]/img/@src'
-                )[0]
+                p.image = page.xpath('//div[contains(@class, "mla-image-cell")]/img/@src')[0]
             except IndexError:
                 pass
 
