fix: make build script functional within codespaces

Ignigena authored Oct 24, 2024
1 parent 9f2b4a9 commit ecb23f4
Showing 6 changed files with 38 additions and 23 deletions.
4 changes: 4 additions & 0 deletions .devcontainer/devcontainer.json
@@ -13,6 +13,10 @@
     }
   },
 
+  "features": {
+    "ghcr.io/devcontainers/features/python:1": {}
+  },
+
   "onCreateCommand": "echo \"//npm.pkg.github.com/:_authToken=$GITHUB_TOKEN\" > ~/.npmrc",
   "postCreateCommand": "npm ci",
   "postAttachCommand": "echo \"//npm.pkg.github.com/:_authToken=$GITHUB_TOKEN\" > ~/.npmrc"
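Note: the build runs parse.py, so the Codespace needs a Python toolchain on PATH; the added `ghcr.io/devcontainers/features/python:1` feature provides one. A hypothetical post-rebuild sanity check (not part of this commit) in the same spirit:

    # Hypothetical check, not from the repo: confirms the devcontainer
    # feature put a usable `python` on PATH after the container rebuilds.
    import shutil
    import subprocess

    python = shutil.which("python") or shutil.which("python3")
    assert python, "no python on PATH; check the devcontainer 'features' block"
    result = subprocess.run([python, "--version"], capture_output=True, text=True)
    print(result.stdout.strip())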
1 change: 1 addition & 0 deletions .github/workflows/test.yml
@@ -16,6 +16,7 @@ jobs:
       - run: npm ci
         env:
          NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - run: make
       - run: npm test
       - name: Report coverage
        uses: codecov/codecov-action@v4
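Note: running `make` ahead of `npm test` suggests the suite exercises the built iso3166.min.js bundle rather than the raw sources; that ordering is an inference from the workflow, not stated in the commit. A hypothetical pre-test guard that mirrors it:

    # Hypothetical guard, not from the repo: fail fast if the bundle
    # `make` should have produced is missing before the tests run.
    import os
    import sys

    if not os.path.exists("iso3166.min.js"):
        sys.exit("iso3166.min.js missing: run `make` before `npm test`")
    print("build artifact present; ok to run tests")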
2 changes: 1 addition & 1 deletion Makefile
@@ -2,6 +2,6 @@ TARGET = iso3166.min.js
 $(TARGET): parse.py data.csv codes.csv functions.js
 	python parse.py
 	echo ";(function () {" > $(TARGET)
-	./node_modules/.bin/uglifyjs -c -m --lint data.js functions.js >> $(TARGET)
+	./node_modules/.bin/uglifyjs -c -m -- data.js functions.js >> $(TARGET)
 	echo "})();" >> $(TARGET)
 	rm data.js
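Note: the v2-era `--lint` flag is gone in uglify-js 3, and the bare `--` separates CLI options from the input files. This pairs with the package.json change below, which swaps the old `uglifyjs` npm name for `uglify-js`, the name current v3 releases are published under. A sketch of the same build step driven from Python rather than make, assuming the local node_modules path and that parse.py has already produced data.js:

    # A sketch, not part of the commit: reproduce the Makefile step from
    # Python, assuming uglify-js v3 under node_modules/.bin.
    import subprocess

    with open("iso3166.min.js", "w") as out:
        out.write(";(function () {\n")
        out.flush()  # keep ordering correct before the child process writes
        subprocess.run(
            ["./node_modules/.bin/uglifyjs", "-c", "-m", "--", "data.js", "functions.js"],
            stdout=out,
            check=True,
        )
        out.write("})();\n")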
20 changes: 13 additions & 7 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -13,7 +13,7 @@
     "benchmark": "^2.1.4",
     "tape": "*",
     "tinybench": "^2.9.0",
-    "uglifyjs": "^2.4.10",
+    "uglify-js": "^3.19.3",
     "underscore": "^1.8.3"
   }
 }
32 changes: 18 additions & 14 deletions parse.py
@@ -1,15 +1,18 @@
 import csv
 import json
 import re
+from functools import reduce
 
 
 def unicode_csv_reader(utf8_data, **kwargs):
     csv_reader = csv.reader(utf8_data, dialect=csv.excel, **kwargs)
     for row in csv_reader:
-        yield [unicode(cell, "utf-8") for cell in row]
+        # No need to decode from utf-8, as strings are Unicode in Python 3.
+        yield [cell for cell in row]
 
 
 # countries and their subdivisions.
-with open("data.csv", "r") as csv_file:
+with open("data.csv", "r", encoding="utf-8") as csv_file:
     countries = {}
     for row in unicode_csv_reader(csv_file):
         country_name = row[0]
@@ -23,32 +26,33 @@ def unicode_csv_reader(utf8_data, **kwargs):
             "name": subdivision_name.strip(),
             "type": type.strip()
         }
 
     subdivisions = reduce(
-        lambda a, b: a + len(countries[b].keys()), countries, 0
+        lambda a, b: a + len(list(countries[b]["sub"].keys())), countries, 0
     )
 
-    print "Countries: %d, Subdivisions: %d" % (
-        len(countries.keys()), subdivisions
-    )
+    print("Countries: %d, Subdivisions: %d" % (
+        len(countries), subdivisions
+    ))
 
-    with open("data.js", "w") as json_file:
-        print "Dumping subdivisions to data.js"
+    with open("data.js", "w", encoding="utf-8") as json_file:
+        print("Dumping subdivisions to data.js")
         json_file.write("var data = ")
-        json.dump(countries, json_file)
+        json.dump(countries, json_file, ensure_ascii=False)
         json_file.write(";")
 
 # alpha-3 to alpha-2 country code conversions
-with open("codes.csv", "r") as csv_file:
+with open("codes.csv", "r", encoding="utf-8") as csv_file:
     codes = {}
     for row in unicode_csv_reader(csv_file):
         alpha2 = row[0]
         alpha3 = row[1]
         codes[alpha3] = alpha2
 
-    print "Country codes: %d" % len(codes.keys())
+    print("Country codes: %d" % len(codes))
 
-    with open("data.js", "a") as json_file:
-        print "Dumping codes to data.js"
+    with open("data.js", "a", encoding="utf-8") as json_file:
+        print("Dumping codes to data.js")
         json_file.write("var codes = ")
-        json.dump(codes, json_file)
+        json.dump(codes, json_file, ensure_ascii=False)
         json_file.write(";")
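Note: the parse.py changes are a straight Python 2 to 3 port: print statements become function calls, `unicode()` decoding goes away, files are opened with an explicit encoding, and `reduce` now comes from `functools`. A minimal sketch of the corrected tally, assuming the nested `{"sub": {...}}` shape the new lambda implies; the sample data is illustrative, not from the repo:

    # Illustrative data only; the real dicts are built from data.csv.
    from functools import reduce

    countries = {
        "Canada": {"sub": {"CA-ON": {}, "CA-QC": {}}},
        "Fiji": {"sub": {"FJ-C": {}}},
    }

    # Iterating a dict yields its keys, so `b` is a country name here.
    subdivisions = reduce(
        lambda a, b: a + len(list(countries[b]["sub"].keys())), countries, 0
    )
    print("Countries: %d, Subdivisions: %d" % (len(countries), subdivisions))
    # -> Countries: 2, Subdivisions: 3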
