
Commit 5908a6c

* Use poetry
1 parent aa0c7f7 commit 5908a6c

File tree

15 files changed: 3,676 additions, 209 deletions


.github/workflows/test.yaml renamed to .github/workflows/ci.yaml

Lines changed: 7 additions & 5 deletions
@@ -1,4 +1,4 @@
-name: Python application test
+name: Python application unit test
 
 on: [pull_request]
 
@@ -16,18 +16,20 @@ jobs:
 
       - name: Install dependencies
         run: |
-          python -m pip install --upgrade pip
-          pip install -e ".[dev]"
+          pip install --upgrade pip
+          pip install poetry
+          poetry update
+          poetry install -v
 
       - name: Set PYTHONPATH
         run: echo "PYTHONPATH=\$PYTHONPATH:$(pwd)" >> $GITHUB_ENV
 
       - name: Verify PYTHONPATH
         run: echo $PYTHONPATH
 
-      - name: Test with pytest
+      - name: Unit Test with pytest
         env:
-          LINEA_PUBLIC_NODE_RPC_URL: ${{ vars.LINEA_PUBLIC_NODE_RPC_URL }}
+          LINEA_PUBLIC_NODE_RPC_URL: '${{ secrets.LINEA_PUBLIC_NODE_RPC_URL }}'
         run: |
           export PYTHONPATH=$(pwd)
           make test indexer

Dockerfile

Lines changed: 15 additions & 7 deletions
@@ -1,14 +1,22 @@
-FROM python:3.9-slim
+FROM python:3.9-slim AS builder
+
+ENV PYTHONUNBUFFERED=1
+ENV PYTHONDONTWRITEBYTECODE=1
 
-WORKDIR /app
+RUN pip install poetry && poetry config virtualenvs.in-project true
 
+WORKDIR "/app"
 COPY . .
 
-RUN pip install --no-cache-dir build && \
-    python -m build && \
-    pip install dist/*.whl && \
-    rm dist/*.whl
+RUN poetry install
+RUN poetry build
+
+FROM python:3.9-slim
+
+WORKDIR "/app"
 
-ENV PYTHONPATH=/app:$PYTHONPATH
+COPY --from=builder /app/migrations ./migrations
+COPY --from=builder /app/dist/*.whl .
+RUN pip install *.whl
 
 ENTRYPOINT ["hemera"]

Makefile

Lines changed: 4 additions & 4 deletions
@@ -1,4 +1,4 @@
-VERSION := $(shell cat VERSION)
+VERSION := $(shell poetry version -s)
 BUILD := `git rev-parse --short=7 HEAD`
 SERVICES =
 .PHONY: all build image test
@@ -10,14 +10,14 @@ RESET=\033[0m
 
 image:
 
-	docker build $(IMAGE_FLAGS) --network host -t hemera-protocol:$(VERSION)-$(BUILD) . -q
+	docker build $(IMAGE_FLAGS) --network host -t hemera-protocol:$(VERSION)-$(BUILD) . --no-cache
 	echo "Built image hemera-protocol:$(VERSION)-$(BUILD)"
 
 test:
 	@if [ "$(filter-out $@,$(MAKECMDGOALS))" = "" ]; then \
-		pytest -vv; \
+		poetry run pytest -vv; \
 	else \
-		pytest -vv -m $(filter-out $@,$(MAKECMDGOALS)); \
+		poetry run pytest -vv -m $(filter-out $@,$(MAKECMDGOALS)); \
 	fi
 
 PRE_COMMIT_INSTALLED := $(shell command -v pre-commit > /dev/null 2>&1 && echo yes || echo no)

VERSION

Lines changed: 0 additions & 1 deletion
This file was deleted.

__init__.py

Lines changed: 3 additions & 2 deletions
@@ -1,4 +1,5 @@
 from pathlib import Path
 
-VERSION_FILE = Path(__file__).parent / "VERSION"
-__version__ = VERSION_FILE.read_text().strip()
+import tomli
+
+__version__ = "0.3.0"
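
The new tomli import suggests reading the version from pyproject.toml instead of pinning it; a minimal sketch of what that could look like, assuming pyproject.toml is reachable from the package (the file location and table layout here are assumptions, not part of this commit):

from pathlib import Path

import tomli

# Assumed layout: pyproject.toml sits next to this package's __init__.py;
# adjust the path if it lives at the repository root instead.
_PYPROJECT = Path(__file__).parent / "pyproject.toml"

with _PYPROJECT.open("rb") as fp:  # tomli only accepts binary file objects
    _pyproject = tomli.load(fp)

# Poetry records the version under the [tool.poetry] table.
__version__ = _pyproject["tool"]["poetry"]["version"]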

cli/__init__.py

Lines changed: 3 additions & 2 deletions
@@ -11,10 +11,11 @@
 
 logging_basic_config()
 
+from importlib import metadata
+
 
 def get_version():
-    version_file = Path(__file__).parent.parent / "VERSION"
-    return version_file.read_text().strip()
+    return metadata.version("hemera")
 
 
 @click.group()
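
For context, metadata.version looks up the version recorded for the installed distribution, so it only works once the package has been installed (for example via poetry install or pip install). A small sketch with an assumed fallback for uninstalled source checkouts (the fallback is illustrative, not part of this commit):

from importlib import metadata

def get_version():
    try:
        # Version of the installed "hemera" distribution, as recorded by pip/Poetry.
        return metadata.version("hemera")
    except metadata.PackageNotFoundError:
        # Hypothetical fallback when running from a source checkout
        # where the package has not been installed.
        return "unknown"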

docker-compose/docker-compose.yaml

Lines changed: 13 additions & 2 deletions
@@ -9,7 +9,7 @@ x-common-settings: &common-settings
   networks:
     - hemera
   volumes:
-    - ./output:/app/output
+    - hemera-output:/app/output
   depends_on:
     postgresql:
       condition: service_healthy
@@ -19,13 +19,20 @@ services:
     <<: *common-settings
     container_name: indexer
     environment:
+      - AUTO_UPGRADE_DB=true
       - ENTITY_TYPES=EXPLORER_BASE,EXPLORER_TOKEN
       - SYNC_RECORDER=pg:main_recorder
 
   hemera-trace-indexer:
     <<: *common-settings
+    depends_on:
+      postgresql:
+        condition: service_healthy
+      hemera-main-indexer:
+        condition: service_started
     container_name: indexer-trace
     environment:
+      - AUTO_UPGRADE_DB=false
      - ENTITY_TYPES=EXPLORER_TRACE
      - SYNC_RECORDER=pg:trace_recorder
 
@@ -59,7 +66,7 @@ services:
     networks:
       - hemera
     volumes:
-      - ./postgres:/var/lib/postgresql/data
+      - hemera-postgres:/var/lib/postgresql/data
 
   redis:
     image: redis:6
@@ -69,5 +76,9 @@ services:
     networks:
       - hemera
 
+volumes:
+  hemera-postgres:
+  hemera-output:
+
 networks:
   hemera:

docker-compose/hemera-indexer.env

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 PROVIDER_URI=https://eth.llamarpc.com
 DEBUG_PROVIDER_URI=https://eth.llamarpc.com
 START_BLOCK=20159954
-POSTGRES_URL=postgresql+psycopg2://user:Ts6YZSrGegXr8PPONtHcSLAHDGd1fjwHBjFZ6WVNpgiOmGf7ghYXl0I1@postgresql:5432/postgres
+POSTGRES_URL=postgresql://user:Ts6YZSrGegXr8PPONtHcSLAHDGd1fjwHBjFZ6WVNpgiOmGf7ghYXl0I1@postgresql:5432/postgres
 OUTPUT=postgres
 
 # This only works when you run this for the very first time. If the container has been created, you will need to remove the volume and recreate the container
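
Dropping the +psycopg2 suffix is harmless if the URL is consumed by SQLAlchemy, which the previous postgresql+psycopg2:// form suggests: the plain postgresql:// scheme falls back to the psycopg2 driver. A minimal check, with placeholder credentials:

from sqlalchemy import create_engine

# "postgresql://" and "postgresql+psycopg2://" select the same DBAPI:
# SQLAlchemy's postgresql dialect defaults to psycopg2 when no driver
# is spelled out. Credentials here are placeholders.
engine = create_engine("postgresql://user:password@postgresql:5432/postgres")
print(engine.dialect.driver)  # -> "psycopg2"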

hemera_api.py

Lines changed: 0 additions & 18 deletions
This file was deleted.

indexer/domain/token_id_infos.py

Lines changed: 1 addition & 1 deletion
@@ -28,7 +28,7 @@ def from_token_dict(token_dict: dict):
 class ERC721TokenIdDetail(Domain):
     token_address: str
     token_id: int
-    token_uri: str
+    token_uri: Optional[str]
     block_number: int
     block_timestamp: int
     token_uri_info: Optional[str] = None
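
To illustrate what the loosened annotation allows, here is a simplified stand-in (a plain @dataclass instead of the repository's Domain base class, with illustrative values): token_uri may now legitimately be None, for example when the URI cannot be fetched or decoded.

from dataclasses import dataclass
from typing import Optional

@dataclass
class ERC721TokenIdDetailSketch:  # simplified stand-in, not the real Domain subclass
    token_address: str
    token_id: int
    token_uri: Optional[str]  # None is now a valid value
    block_number: int
    block_timestamp: int
    token_uri_info: Optional[str] = None

detail = ERC721TokenIdDetailSketch(
    token_address="0x6e84390dcc5195414ec91a8c56a5c91021b95704",
    token_id=1,
    token_uri=None,  # rejected by static checkers under the old str annotation
    block_number=8494071,
    block_timestamp=1724397109,
)
print(detail.token_uri is None)  # True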

indexer/tests/jobs/__init__.py

Whitespace-only changes.

indexer/tests/jobs/test_token_id_infos_job.py

Lines changed: 0 additions & 96 deletions
@@ -336,99 +336,3 @@ def test_export_token_id_info_job_on_linea_mul():
     )
 
     job_scheduler.clear_data_buff()
-
-
-@pytest.mark.indexer
-@pytest.mark.indexer_exporter
-@pytest.mark.serial
-def test_export_token_id_info_job_on_linea_mul():
-    job_scheduler = JobScheduler(
-        batch_web3_provider=ThreadLocalProxy(
-            lambda: get_provider_from_uri(
-                LINEA_PUBLIC_NODE_RPC_URL,
-                batch=True,
-            )
-        ),
-        batch_web3_debug_provider=ThreadLocalProxy(
-            lambda: get_provider_from_uri(
-                LINEA_PUBLIC_NODE_RPC_URL,
-                batch=True,
-            )
-        ),
-        item_exporters=[ConsoleItemExporter()],
-        batch_size=10,
-        debug_batch_size=1,
-        max_workers=1,
-        config={},
-        required_output_types=[
-            ERC721TokenIdChange,
-            ERC721TokenIdDetail,
-            UpdateERC721TokenIdDetail,
-            ERC1155TokenIdDetail,
-            UpdateERC1155TokenIdDetail,
-        ],
-        multicall=True,
-    )
-
-    job_scheduler.run_jobs(
-        start_block=8494071,
-        end_block=8494071,
-    )
-
-    data_buff = job_scheduler.get_data_buff()
-
-    erc721_token_id_changes = data_buff[ERC721TokenIdChange.type()]
-    assert len(erc721_token_id_changes) == 1
-    assert (
-        ERC721TokenIdChange(
-            token_address="0x6e84390dcc5195414ec91a8c56a5c91021b95704",
-            token_id=110042221770367602542853534930234725702383442308140339620523913150618217206456,
-            token_owner="0xa53cca02f98d590819141aa85c891e2af713c223",
-            block_number=8494071,
-            block_timestamp=1724397109,
-        )
-        in erc721_token_id_changes
-    )
-
-    erc721_token_id_details = data_buff[ERC721TokenIdDetail.type()]
-    assert len(erc721_token_id_details) == 1
-    assert (
-        ERC721TokenIdDetail(
-            token_address="0x6e84390dcc5195414ec91a8c56a5c91021b95704",
-            token_id=110042221770367602542853534930234725702383442308140339620523913150618217206456,
-            token_uri="",
-            block_number=8494071,
-            block_timestamp=1724397109,
-            token_uri_info=None,
-        )
-        in erc721_token_id_details
-    )
-
-    update_erc721_token_id_details = data_buff[UpdateERC721TokenIdDetail.type()]
-    assert len(update_erc721_token_id_details) == 1
-    assert (
-        UpdateERC721TokenIdDetail(
-            token_address="0x6e84390dcc5195414ec91a8c56a5c91021b95704",
-            token_id=110042221770367602542853534930234725702383442308140339620523913150618217206456,
-            token_owner="0xa53cca02f98d590819141aa85c891e2af713c223",
-            block_number=8494071,
-            block_timestamp=1724397109,
-        )
-        in update_erc721_token_id_details
-    )
-
-    erc1155_token_id_details = data_buff[ERC1155TokenIdDetail.type()]
-    assert len(erc1155_token_id_details) == 1
-    assert (
-        ERC1155TokenIdDetail(
-            token_address="0xa53cca02f98d590819141aa85c891e2af713c223",
-            token_id=54780668040604116915679158082040366453838453357839560563054770201457212183923,
-            token_uri="ens-metadata-service.appspot.com/name/0x{id}",
-            block_number=8494071,
-            block_timestamp=1724397109,
-            token_uri_info=None,
-        )
-        in erc1155_token_id_details
-    )
-
-    job_scheduler.clear_data_buff()

indexer/utils/token_fetcher.py

Lines changed: 2 additions & 2 deletions
@@ -152,7 +152,7 @@ def create_token_detail(self, token_info, value, decode_flag):
 
         if token_info["is_get_token_uri"]:
             try:
-                token_uri = decode_string(value) if decode_flag else None
+                token_uri = decode_string(value) if decode_flag else value
             except Exception as e:
                 token_uri = None
                 logging.error(f"decode token uri failed, token_info={token_info}, value={value}")
@@ -172,7 +172,7 @@ def create_token_detail(self, token_info, value, decode_flag):
                 return [UpdateERC1155TokenIdDetail(**common_args, token_supply=token_supply)]
         except Exception as e:
             exception_recorder.log(
-                block_number=token_info.block_number,
+                block_number=token_info["block_number"],
                 dataclass=to_snake_case("token_id_info"),
                 message_type="decode_token_id_info_fail",
                 message=str(e),
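
The second hunk fixes attribute-style access on what the surrounding code treats as a plain dict; a standalone illustration of the failure mode (the values are made up):

token_info = {"block_number": 8494071, "is_get_token_uri": True}

try:
    token_info.block_number  # dicts have no such attribute
except AttributeError as exc:
    print(f"attribute access fails: {exc}")

print(token_info["block_number"])  # 8494071 -- the corrected access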
