Skip to content

Commit ec1ccc6

Browse files
committed
Add user-test-activity3.py script, which performs various query and download tests for a GSS instance.
1 parent 8240f24 commit ec1ccc6

File tree

4 files changed

+327
-0
lines changed

4 files changed

+327
-0
lines changed

HTTPAuthOptions.py

Lines changed: 92 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,92 @@
1+
import requests
2+
from requests.auth import HTTPBasicAuth
3+
from requests.auth import AuthBase
4+
import logging
5+
import netrc
6+
from urllib.parse import urlparse
7+
8+
# Custom authentication class for Bearer Token
class HTTPBearerAuth(AuthBase):
    """Requests auth hook that attaches a Bearer token read from a file."""

    def __init__(self, token_file=".token"):
        """
        :param token_file: path to a file whose content is the bearer token
        :raises FileNotFoundError: if the token file does not exist
        """
        logging.debug(f"Initializing {type(self)}")
        self.token = self._read_token(token_file)

    def _read_token(self, token_file):
        """Reads the token from a file."""
        try:
            logging.debug(f"Reading file {token_file}")
            # Explicit encoding so the token is decoded the same way on every platform.
            with open(token_file, "r", encoding="utf-8") as f:
                return f.read().strip()
        except FileNotFoundError:
            logging.error(f"Token file '{token_file}' not found.")
            raise

    def __call__(self, r):
        """Attach the Bearer token to the request headers."""
        r.headers["Authorization"] = f"Bearer {self.token}"
        return r
29+
30+
class FileBasedBasicAuth(HTTPBasicAuth):
    """HTTP Basic auth whose credentials come from a 'user:password' file."""

    def __init__(self, filepath=".basic-auth"):
        """
        Initializes the authentication object by reading credentials from the file.

        :param filepath: Path to the .basic-auth file (default: ".basic-auth")
        """
        logging.debug(f"Initializing {type(self)}")
        username, password = self._read_credentials(filepath)
        super().__init__(username, password)

    def _read_credentials(self, filepath):
        """Reads user:password from the specified file.

        :return: [user, password] from the first line of the file
        :raises FileNotFoundError: if the file does not exist
        :raises RuntimeError: if the file cannot be read or is malformed
        """
        try:
            with open(filepath, "r", encoding="utf-8") as f:
                line = f.readline().strip()  # Read first line and strip whitespace
                if ":" not in line:
                    raise ValueError("Invalid format: Expected 'user:password'")
                return line.split(":", 1)  # Split at the first colon
        except FileNotFoundError:
            # Re-raise with a clearer message; suppress the redundant original context.
            raise FileNotFoundError(f"File not found: {filepath}") from None
        except Exception as e:
            # Chain the cause so the original traceback is preserved for debugging.
            raise RuntimeError(f"Error reading {filepath}: {e}") from e
55+
56+
class KeycloakTokenAuth(HTTPBearerAuth):
    """Bearer auth whose token is obtained from a Keycloak server.

    Credentials for the token server are read from ~/.netrc; the access
    token is fetched once, at construction time, via the OIDC password grant.
    """

    def __init__(self, server_url, realm, client_id, client_secret=None):
        """
        :param server_url: base URL of the Keycloak server (e.g. https://host/auth)
        :param realm: Keycloak realm name
        :param client_id: OIDC client id
        :param client_secret: optional OIDC client secret (confidential clients)
        """
        logging.debug(f"Initializing {type(self)}")
        self.server_url = server_url
        self.realm = realm
        self.client_id = client_id
        self.client_secret = client_secret
        self.token_url = f"{server_url}/realms/{realm}/protocol/openid-connect/token"
        username, password = self._read_credentials()
        self.token = self._get_token(username, password)

    def _read_credentials(self):
        """Return (login, password) for the token server host from ~/.netrc."""
        n = netrc.netrc()
        host = urlparse(self.server_url).netloc
        creds = n.authenticators(host)
        if creds is None:
            # authenticators() returns None when no machine entry matches;
            # fail with a clear message instead of a TypeError on indexing.
            raise RuntimeError(f"No .netrc entry found for host '{host}'")
        return creds[0], creds[2]

    def _get_token(self, username, password):
        """POST the OIDC password grant and return the access token string.

        :raises Exception: if the token endpoint does not answer with HTTP 200
        """
        data = {
            "grant_type": "password",
            "client_id": self.client_id,
            "username": username,
            "password": password,
        }

        if self.client_secret:
            data["client_secret"] = self.client_secret

        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        # Bound the request so an unresponsive token endpoint cannot hang the client.
        response = requests.post(self.token_url, data=data, headers=headers, timeout=30)

        if response.status_code == 200:
            access_token = response.json()["access_token"]
            return access_token
        else:
            raise Exception(f"Failed to get token: {response.status_code}, {response.text}")

README.md

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,3 +37,30 @@ configurable parameters: `./register_stack.py -h`
3737

3838
**Authentication**: Basic auth is resolved automatically by the Requests library by reading a **~/.netrc** file. Make sure
3939
to set up the correct entries (Sentinel and STAC host URL) there.
40+
41+
# GSS user test activity
42+
Automation of the COPE-SRCO-PL-2400437 GSS user test activity v1.1.
43+
Performs OData queries: OData filters, queries by attributes, and node inspection for a few randomly selected products of each product type.
44+
45+
## Installation
46+
```
47+
virtualenv .
48+
source bin/activate
49+
pip install -r requirements.txt
50+
```
51+
52+
## Usage
53+
Example:
54+
```
55+
python user-test-activity3.py -b
56+
```
57+
58+
Use `-b` for basic authentication via a `.netrc` file, which contains `machine`, `login`, and `password` records. See `man curl`.
59+
60+
Use `-t` for bearer token authentication via a `.token` file, which contains a single token line.
61+
62+
Use `-k` for Keycloak authentication. Credentials are read from `.netrc`.
63+
64+
Use `-d` to increase verbosity. Specify multiple times to increase more.
65+
66+
Custom filters can be defined in the `filters.txt` file.

filters.txt

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
# You can use comments
2+
$filter=startswith(Name,'S1') and Online eq True&$format=json&$top=10
3+
$filter=startswith(Name,'S2') and Online eq True&$format=json&$top=10
4+
$filter=startswith(Name,'S3') and Online eq True&$format=json&$top=10
5+
$filter=startswith(Name,'S5P') and Online eq True&$format=json&$top=10
6+
$filter=not (Collection/Name eq 'SENTINEL-2') and not contains(Name,'OPER_AUX') and ContentDate/Start gt 2025-01-03T00:00:00.000Z and ContentDate/Start lt 2025-02-03T00:10:00.000Z&$orderby=ContentDate/Start&$top=30

user-test-activity3.py

Lines changed: 202 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,202 @@
1+
import requests
2+
import random
3+
import logging
4+
import os
5+
import argparse
6+
import HTTPAuthOptions
7+
8+
# OData service base URL; alternative instances kept for quick switching.
#BASE_URL = "https://dhr1.cesnet.cz/odata/v2"
#BASE_URL = "https://gss.dhr.metacentrum.cz/odata/v1"
#BASE_URL = "https://dhs2.copernicus.eu/odatav4/odata/v2"
BASE_URL = "https://collgs.cesnet.cz/odata/v1"

# Keycloak authentication data (used only with the -k option)
TOKEN_URL="https://dhs2.copernicus.eu/auth"
REALM = "gss"
CLIENT_ID="dhs2"

# Destination directory for downloads
DOWNLOAD_DIR = "./tmp/"
# Created at import time; exist_ok avoids an error on repeated runs.
os.makedirs(DOWNLOAD_DIR, exist_ok=True)

# Maximum number of products sampled per query
MAX_PRODUCTS = 2
24+
25+
def nodes_to_url(node_ids):
    """Build the OData Nodes path segment for a node-id path.

    E.g. ["a", "b"] -> "Nodes('a')/Nodes('b')".
    (PEP 8: use a def instead of assigning a lambda to a name.)
    """
    return "/".join(f"Nodes('{node_id}')" for node_id in node_ids)
26+
27+
def get_products(auth, queries):
    """Fetch products by given queries.

    :param auth: requests auth object, or None to let requests use ~/.netrc
    :param queries: iterable of OData query strings (appended after '?')
    :return: dict mapping query -> list of up to MAX_PRODUCTS product dicts;
             queries with no results or failed requests are omitted
    """
    products_by_query = {}

    for query in queries:
        response = requests.get(
            f"{BASE_URL}/Products?{query}",
            auth=auth,
            timeout=60,  # bound the call so a stuck instance cannot hang the test run
        )

        if response.status_code == 200:
            data = response.json()
            products = data.get("value", [])

            if products:
                # Random subset so repeated runs exercise different products.
                products_by_query[query] = random.sample(products, min(MAX_PRODUCTS, len(products)))
                logging.info(f"Found {len(products_by_query[query])} products for type {query}.")
            else:
                logging.warning(f"No products found for type {query}.")
        else:
            logging.error(f"Failed to fetch products for {query}: {response.status_code} {response.text}")

    return products_by_query
50+
51+
def download_value(entity, entity_id, auth, entity_type, node_ids=None):
    """Download entity's $value (binary content) to tmp.

    :param entity: entity dict; must contain 'Name' (file name) and 'Id' (logging)
    :param entity_id: product id used to build the URL
    :param entity_type: 'Products' or 'Nodes'
    :param node_ids: node-id path list, required when entity_type == 'Nodes'
    """
    if entity_type == 'Nodes':
        url = f"{BASE_URL}/Products({entity_id})/{nodes_to_url(node_ids)}/$value"
    else:
        url = f"{BASE_URL}/{entity_type}({entity_id})/$value"
    # stream=True + chunked writes keep memory use flat for large products.
    response = requests.get(url, auth=auth, stream=True, timeout=300)

    if response.status_code == 200:
        file_path = os.path.join(DOWNLOAD_DIR, f"{entity['Name']}")
        with open(file_path, "wb") as f:
            for chunk in response.iter_content(chunk_size=8192):
                f.write(chunk)
        logging.info(f"Downloaded {entity_type} {entity['Id']} to {file_path}")
    else:
        # response.reason is the public API; requests.status_codes._codes is
        # private and raises KeyError for status codes it does not know.
        logging.error(f"Failed to download {entity_type} {entity['Id']} value: {response.status_code} {response.reason}")
68+
def inspect_nodes(auth, product_id, node_id, depth=0, max_depth=1):
    """Recursively explore Nodes and download some of their $value.

    :param node_id: list of node ids forming the path from the product root
                    to the inspected node (a path, despite the singular name —
                    it is joined via nodes_to_url)
    :param depth: current recursion depth; recursion stops beyond max_depth
    """
    if depth > max_depth:
        return

    logging.info(f"Inspecting Node {node_id}, Depth {depth}")

    node_entity_response = requests.get(
        f"{BASE_URL}/Products({product_id})/{nodes_to_url(node_id)}?$format=json",
        auth=auth,
        timeout=60,  # bound the call so a stuck instance cannot hang the test run
    )
    if node_entity_response.status_code == 200:
        node_entity = node_entity_response.json()
        if node_entity:
            logging.info(f" Node entity found for Node {node_id}.")
            inspect_child_nodes(auth, product_id, node_id, depth, max_depth)
        else:
            logging.warning(f" No node entity found for Node {node_id}.")
    else:
        logging.error(f" Failed to fetch node entity for Node {node_id}: {node_entity_response.status_code}")
88+
89+
# Fetch child nodes
90+
def inspect_child_nodes(auth, product_id, node_ids, depth=0, max_depth=1):
91+
node_response = requests.get(
92+
f"{BASE_URL}/Products({product_id})/{nodes_to_url(node_ids)}/Nodes?$format=json",
93+
auth=auth
94+
)
95+
96+
if node_response.status_code == 200:
97+
nodes = node_response.json().get("value", [])
98+
if nodes:
99+
logging.info(f" Found {len(nodes)} child nodes for Node {node_ids}")
100+
101+
# Randomly select a few nodes to download
102+
selected_nodes = random.sample(nodes, min(2, len(nodes)))
103+
for node in selected_nodes:
104+
node_id = node["Id"]
105+
#download_value(node, product_id, auth, "Nodes", node_ids)
106+
107+
# Recursively go deeper
108+
inspect_nodes(auth, product_id, node_ids + [node_id], depth + 1, max_depth)
109+
else:
110+
logging.warning(f" No child nodes found for Node {node_ids}")
111+
else:
112+
logging.error(f" Failed to fetch nodes for Node {node_ids}: {node_response.status_code}")
113+
114+
def inspect_products(auth, products_by_query):
    """Fetch and log attributes, nodes, and download $value for selected products.

    :param auth: requests auth object (or None for ~/.netrc resolution)
    :param products_by_query: dict query -> list of product dicts, as
                              produced by get_products
    """
    for query, products in products_by_query.items():
        logging.info(f"Inspecting Product Result: {query}")

        for product in products:
            product_id = product["Id"]
            product_name = product["Name"]
            logging.info(f"Product ID: {product_id}, Name: {product_name}")

            # Download product $value
            download_value(product, product_id, auth, "Products")

            # Get attributes
            attr_response = requests.get(
                f"{BASE_URL}/Products({product_id})/Attributes?$format=json",
                auth=auth,
                timeout=60,  # bound the call so a stuck instance cannot hang the test run
            )
            if attr_response.status_code == 200:
                attributes = attr_response.json().get("value", [])
                if attributes:
                    logging.info(f" Attributes found for Product {product_id}.")
                else:
                    logging.warning(f" No attributes found for Product {product_id}.")
            else:
                logging.error(f" Failed to fetch attributes for Product {product_id}: {attr_response.status_code}")

            # Get nodes
            node_response = requests.get(
                f"{BASE_URL}/Products({product_id})/Nodes?$format=json",
                auth=auth,
                timeout=60,
            )
            if node_response.status_code == 200:
                nodes = node_response.json().get("value", [])
                if nodes:
                    logging.info(f"Found {len(nodes)} nodes for Product {product_id}")

                    # Select a random node and walk deeper
                    random_node = random.choice(nodes)
                    inspect_nodes(auth, product_id, [random_node["Id"]])
                else:
                    logging.warning(f"No nodes found for Product {product_id}")
            else:
                logging.error(f" Failed to fetch nodes for Product {product_id}: {node_response.status_code}")
158+
159+
160+
161+
if __name__ == "__main__":
    parser = argparse.ArgumentParser()

    # Exactly one authentication mode must be chosen.
    group = parser.add_mutually_exclusive_group(required=True)
    # Help texts corrected: -b and -k both resolve credentials from ~/.netrc
    # (see the auth selection below and KeycloakTokenAuth._read_credentials),
    # not from a .basic-auth file.
    group.add_argument("-b", action="store_true", help="Use basic authentication (.netrc file)")
    group.add_argument("-t", action="store_true", help="Use token authentication (.token file)")
    group.add_argument("-k", action="store_true", help="Use keycloak authentication (.netrc file)")

    parser.add_argument("-d", action="count", default=0, help="Increase logging verbosity (-d: INFO, -dd: DEBUG)")

    # Parse arguments
    args = parser.parse_args()

    # Set logging level based on occurrences of -d
    if args.d >= 2:
        log_level = logging.DEBUG
    elif args.d == 1:
        log_level = logging.INFO
    else:
        log_level = logging.WARNING

    # Configure logging
    logging.basicConfig(level=log_level, format="%(asctime)s - %(levelname)s - %(message)s")

    if args.k:
        auth = HTTPAuthOptions.KeycloakTokenAuth(server_url=TOKEN_URL, realm=REALM, client_id=CLIENT_ID)
    elif args.t:
        auth = HTTPAuthOptions.HTTPBearerAuth()
    else:
        #auth = HTTPAuthOptions.FileBasedBasicAuth()
        # auth=None lets the Requests library resolve basic auth from ~/.netrc.
        auth = None

    # One OData query per non-empty, non-comment line of filters.txt.
    with open("filters.txt", "r", encoding="utf-8") as file:
        queries = [line.strip() for line in file if line.strip() and not line.lstrip().startswith("#")]

    logging.info("Starting OData queries...")
    products_by_query = get_products(auth, queries)
    inspect_products(auth, products_by_query)

    logging.info("OData queries completed successfully.")

0 commit comments

Comments
 (0)