diff --git a/404.html b/404.html index 1296a2b4..8185dfda 100644 --- a/404.html +++ b/404.html @@ -174,18 +174,6 @@
    • 1 China VLCC Discharges
-   • 2 Laden Vessel Movements
-   • 3 Crude movements from Saudi Arabia to India
-   • 4 Ballast Movements
-   • 5 Chinese Daily Crude Imports
-   • 6 Floating Storage Medium Sour Crude
-   • 7 Ingest Custom Excel Charterers
    • Jupyter Notebooks
  • diff --git a/config/config/index.html b/config/config/index.html index afcd5853..08aa9b90 100644 --- a/config/config/index.html +++ b/config/config/index.html @@ -181,18 +181,6 @@
  • diff --git a/config/index.html b/config/index.html index f499d77d..6fa1337d 100644 --- a/config/index.html +++ b/config/index.html @@ -183,18 +183,6 @@
  • diff --git a/endpoints/about-endpoints/index.html b/endpoints/about-endpoints/index.html index 8d952615..fd9b4b24 100644 --- a/endpoints/about-endpoints/index.html +++ b/endpoints/about-endpoints/index.html @@ -183,18 +183,6 @@
  • diff --git a/endpoints/asset_tanks/index.html b/endpoints/asset_tanks/index.html index 01b40583..38a0872b 100644 --- a/endpoints/asset_tanks/index.html +++ b/endpoints/asset_tanks/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/attributes/index.html b/endpoints/attributes/index.html index 80d233b6..fd295d17 100644 --- a/endpoints/attributes/index.html +++ b/endpoints/attributes/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/cargo_movements/index.html b/endpoints/cargo_movements/index.html index 17fcb17a..e30eb09a 100644 --- a/endpoints/cargo_movements/index.html +++ b/endpoints/cargo_movements/index.html @@ -183,18 +183,6 @@
  • @@ -250,7 +238,7 @@

    CargoMovements

    A detailed explanation of Cargo/Vessel Movements can be found here.

    search

    -
    CargoMovements.search(self, filter_activity: str = None, filter_time_min: datetime.datetime = datetime.datetime(2023, 9, 7, 10, 23, 46, 776785), filter_time_max: datetime.datetime = datetime.datetime(2023, 9, 7, 10, 23, 46, 776788), cm_unit: str = 'b', filter_charterers: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_products: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_storage_locations: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_flags: Union[str, List[str]] = None, filter_vessel_ice_class: Union[str, List[str]] = None, filter_vessel_propulsion: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, exclude_vessel_ice_class: Union[str, List[str]] = None, exclude_vessel_propulsion: Union[str, List[str]] = None, disable_geographic_exclusion_rules: bool = None) -> vortexasdk.endpoints.cargo_movements_result.CargoMovementsResult
    +
    CargoMovements.search(self, filter_activity: str = None, filter_time_min: datetime.datetime = datetime.datetime(2023, 9, 11, 11, 19, 18, 998274), filter_time_max: datetime.datetime = datetime.datetime(2023, 9, 11, 11, 19, 18, 998277), cm_unit: str = 'b', filter_charterers: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_products: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_storage_locations: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_flags: Union[str, List[str]] = None, filter_vessel_ice_class: Union[str, List[str]] = None, filter_vessel_propulsion: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, exclude_vessel_ice_class: Union[str, List[str]] = None, exclude_vessel_propulsion: Union[str, List[str]] = None, disable_geographic_exclusion_rules: bool = None) -> vortexasdk.endpoints.cargo_movements_result.CargoMovementsResult
     

    Find CargoMovements matching the given search parameters.
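    A minimal usage sketch, in the doctest style used by the examples elsewhere in these docs (the activity filter and dates are taken from the front-page example; the datetime defaults shown in the signature above are auto-generated timestamps and are normally overridden):

    >>> from datetime import datetime
    >>> from vortexasdk import CargoMovements
    >>> df = CargoMovements().search(
    ...     filter_activity='loading_state',
    ...     filter_time_min=datetime(2017, 8, 2),
    ...     filter_time_max=datetime(2017, 8, 3),
    ...     cm_unit='b').to_df()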

    Arguments

    diff --git a/endpoints/cargo_timeseries/index.html b/endpoints/cargo_timeseries/index.html index 76c25338..4844fff0 100644 --- a/endpoints/cargo_timeseries/index.html +++ b/endpoints/cargo_timeseries/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/corporations/index.html b/endpoints/corporations/index.html index 4b904096..ca92bfd8 100644 --- a/endpoints/corporations/index.html +++ b/endpoints/corporations/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/destination_breakdown/index.html b/endpoints/destination_breakdown/index.html index fb7d7668..ccf83237 100644 --- a/endpoints/destination_breakdown/index.html +++ b/endpoints/destination_breakdown/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/eia_forecasts/index.html b/endpoints/eia_forecasts/index.html index b707c386..7efa2ea5 100644 --- a/endpoints/eia_forecasts/index.html +++ b/endpoints/eia_forecasts/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/fixtures/index.html b/endpoints/fixtures/index.html index 4e5345e3..3850ef55 100644 --- a/endpoints/fixtures/index.html +++ b/endpoints/fixtures/index.html @@ -183,18 +183,6 @@
  • diff --git a/endpoints/freight_pricing_search/index.html b/endpoints/freight_pricing_search/index.html index ab17158b..3cbc4ee0 100644 --- a/endpoints/freight_pricing_search/index.html +++ b/endpoints/freight_pricing_search/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/freight_pricing_timeseries/index.html b/endpoints/freight_pricing_timeseries/index.html index 490b6465..79f814e7 100644 --- a/endpoints/freight_pricing_timeseries/index.html +++ b/endpoints/freight_pricing_timeseries/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/geographies/index.html b/endpoints/geographies/index.html index 6ec7be84..9238474e 100644 --- a/endpoints/geographies/index.html +++ b/endpoints/geographies/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/movement_status_breakdown/index.html b/endpoints/movement_status_breakdown/index.html index 5b865d97..ea529931 100644 --- a/endpoints/movement_status_breakdown/index.html +++ b/endpoints/movement_status_breakdown/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/onshore_inventories_search/index.html b/endpoints/onshore_inventories_search/index.html index 92a0a63b..af2e02bb 100644 --- a/endpoints/onshore_inventories_search/index.html +++ b/endpoints/onshore_inventories_search/index.html @@ -181,18 +181,6 @@
  • @@ -252,7 +240,7 @@

    search

    -
    OnshoreInventoriesSearch.search(self, corporate_entity_ids: Union[str, List[str]] = None, crude_confidence: List[str] = None, location_ids: Union[str, List[str]] = None, storage_types: List[str] = None, time_min: datetime.datetime = datetime.datetime(2023, 8, 31, 10, 23, 46, 804267), time_max: datetime.datetime = datetime.datetime(2023, 9, 7, 10, 23, 46, 804281)) -> vortexasdk.endpoints.onshore_inventories_result.OnshoreInventoriesResult
    +
    OnshoreInventoriesSearch.search(self, corporate_entity_ids: Union[str, List[str]] = None, crude_confidence: List[str] = None, location_ids: Union[str, List[str]] = None, storage_types: List[str] = None, time_min: datetime.datetime = datetime.datetime(2023, 9, 4, 11, 19, 19, 15269), time_max: datetime.datetime = datetime.datetime(2023, 9, 11, 11, 19, 19, 15278)) -> vortexasdk.endpoints.onshore_inventories_result.OnshoreInventoriesResult
     

    List of crude onshore inventories across the globe.
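    A minimal sketch of calling this endpoint, assuming the top-level import and the .to_df() accessor follow the same pattern as the other SDK endpoints documented here (the 'confirmed' crude_confidence value comes from the Asset Tanks reference page):

    >>> from datetime import datetime
    >>> from vortexasdk import OnshoreInventoriesSearch
    >>> df = OnshoreInventoriesSearch().search(
    ...     crude_confidence=['confirmed'],
    ...     time_min=datetime(2023, 9, 4),
    ...     time_max=datetime(2023, 9, 11)).to_df()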

    Arguments

    diff --git a/endpoints/onshore_inventories_timeseries/index.html b/endpoints/onshore_inventories_timeseries/index.html index 818d8412..b988bb6d 100644 --- a/endpoints/onshore_inventories_timeseries/index.html +++ b/endpoints/onshore_inventories_timeseries/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/origin_breakdown/index.html b/endpoints/origin_breakdown/index.html index f8ff12f3..e56a4b38 100644 --- a/endpoints/origin_breakdown/index.html +++ b/endpoints/origin_breakdown/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/parent_product_breakdown/index.html b/endpoints/parent_product_breakdown/index.html index be779e05..e5d6bb0d 100644 --- a/endpoints/parent_product_breakdown/index.html +++ b/endpoints/parent_product_breakdown/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/product_breakdown/index.html b/endpoints/product_breakdown/index.html index c7e5700a..b4f5d4f4 100644 --- a/endpoints/product_breakdown/index.html +++ b/endpoints/product_breakdown/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/products/index.html b/endpoints/products/index.html index 82b3fb47..6fd78ae2 100644 --- a/endpoints/products/index.html +++ b/endpoints/products/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/storage_terminals/index.html b/endpoints/storage_terminals/index.html index cf899e03..ac567b68 100644 --- a/endpoints/storage_terminals/index.html +++ b/endpoints/storage_terminals/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/vessel_availability_breakdown/index.html b/endpoints/vessel_availability_breakdown/index.html index f5b0f69d..80c642ab 100644 --- a/endpoints/vessel_availability_breakdown/index.html +++ b/endpoints/vessel_availability_breakdown/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/vessel_availability_search/index.html b/endpoints/vessel_availability_search/index.html index f2e6a3db..f6b44ffe 100644 --- a/endpoints/vessel_availability_search/index.html +++ b/endpoints/vessel_availability_search/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/vessel_availability_timeseries/index.html b/endpoints/vessel_availability_timeseries/index.html index 15294b88..58724b1f 100644 --- a/endpoints/vessel_availability_timeseries/index.html +++ b/endpoints/vessel_availability_timeseries/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/vessel_class_breakdown/index.html b/endpoints/vessel_class_breakdown/index.html index 1d77bb90..4029d3cc 100644 --- a/endpoints/vessel_class_breakdown/index.html +++ b/endpoints/vessel_class_breakdown/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/vessels/index.html b/endpoints/vessels/index.html index 50e4d1f7..83648e41 100644 --- a/endpoints/vessels/index.html +++ b/endpoints/vessels/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/voyages_congestion_breakdown/index.html b/endpoints/voyages_congestion_breakdown/index.html index 30ca1f47..06ffbef6 100644 --- a/endpoints/voyages_congestion_breakdown/index.html +++ b/endpoints/voyages_congestion_breakdown/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/voyages_search_enriched/index.html b/endpoints/voyages_search_enriched/index.html index 9b4c4961..f3cdc105 100644 --- a/endpoints/voyages_search_enriched/index.html +++ b/endpoints/voyages_search_enriched/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/voyages_timeseries/index.html b/endpoints/voyages_timeseries/index.html index 7710b4c3..16ea06dc 100644 --- a/endpoints/voyages_timeseries/index.html +++ b/endpoints/voyages_timeseries/index.html @@ -181,18 +181,6 @@
  • diff --git a/endpoints/voyages_top_hits/index.html b/endpoints/voyages_top_hits/index.html index 2a0c69c8..46b496fb 100644 --- a/endpoints/voyages_top_hits/index.html +++ b/endpoints/voyages_top_hits/index.html @@ -181,18 +181,6 @@
  • diff --git a/entities/breakdown_item/index.html b/entities/breakdown_item/index.html index 26fb379b..0841c42d 100644 --- a/entities/breakdown_item/index.html +++ b/entities/breakdown_item/index.html @@ -181,18 +181,6 @@
  • diff --git a/entities/cargo_movement/index.html b/entities/cargo_movement/index.html index f41d5cd2..8bf935d1 100644 --- a/entities/cargo_movement/index.html +++ b/entities/cargo_movement/index.html @@ -181,18 +181,6 @@
  • diff --git a/entities/corporation/index.html b/entities/corporation/index.html index 3e137fdc..fe85c940 100644 --- a/entities/corporation/index.html +++ b/entities/corporation/index.html @@ -181,18 +181,6 @@
  • diff --git a/entities/geography/index.html b/entities/geography/index.html index 3a40bd3b..36500d0f 100644 --- a/entities/geography/index.html +++ b/entities/geography/index.html @@ -181,18 +181,6 @@
  • diff --git a/entities/product/index.html b/entities/product/index.html index 3191e2ca..b2deffa7 100644 --- a/entities/product/index.html +++ b/entities/product/index.html @@ -181,18 +181,6 @@
  • diff --git a/entities/timeseries_item/index.html b/entities/timeseries_item/index.html index bd2016fc..bbd71c05 100644 --- a/entities/timeseries_item/index.html +++ b/entities/timeseries_item/index.html @@ -181,18 +181,6 @@
  • diff --git a/entities/vessel/index.html b/entities/vessel/index.html index aaa0da63..e7193b5c 100644 --- a/entities/vessel/index.html +++ b/entities/vessel/index.html @@ -181,18 +181,6 @@
  • diff --git a/entities/vessel_availability/index.html b/entities/vessel_availability/index.html index 53a3cdbb..03e8cfe8 100644 --- a/entities/vessel_availability/index.html +++ b/entities/vessel_availability/index.html @@ -181,18 +181,6 @@
  • diff --git a/entities/voyages/index.html b/entities/voyages/index.html index b6027c7a..5a43179b 100644 --- a/entities/voyages/index.html +++ b/entities/voyages/index.html @@ -181,18 +181,6 @@
  • diff --git a/examples/0_sample_load_cargo_movements/index.html b/examples/0_sample_load_cargo_movements/index.html index 0cd63a5e..7cf959ce 100644 --- a/examples/0_sample_load_cargo_movements/index.html +++ b/examples/0_sample_load_cargo_movements/index.html @@ -181,18 +181,6 @@
  • diff --git a/examples/1_china/index.html b/examples/1_china/index.html index 3ddd5f32..92abdeee 100644 --- a/examples/1_china/index.html +++ b/examples/1_china/index.html @@ -181,18 +181,6 @@
  • diff --git a/examples/2_crude_from_saudi_arabia_to_india/index.html b/examples/2_crude_from_saudi_arabia_to_india/index.html index 641dca2b..4ed709bb 100644 --- a/examples/2_crude_from_saudi_arabia_to_india/index.html +++ b/examples/2_crude_from_saudi_arabia_to_india/index.html @@ -181,18 +181,6 @@
  • diff --git a/examples/3_chinese_daily_imports/index.html b/examples/3_chinese_daily_imports/index.html index e36104ef..6ef71ee3 100644 --- a/examples/3_chinese_daily_imports/index.html +++ b/examples/3_chinese_daily_imports/index.html @@ -181,18 +181,6 @@
  • diff --git a/examples/4_medium_sour_floating_storage/index.html b/examples/4_medium_sour_floating_storage/index.html index 6893fa6a..bc11a768 100644 --- a/examples/4_medium_sour_floating_storage/index.html +++ b/examples/4_medium_sour_floating_storage/index.html @@ -181,18 +181,6 @@
  • diff --git a/examples/jupyter_notebooks/index.html b/examples/jupyter_notebooks/index.html index bb2fb944..d4be44bb 100644 --- a/examples/jupyter_notebooks/index.html +++ b/examples/jupyter_notebooks/index.html @@ -181,18 +181,6 @@
    diff --git a/faq/index.html b/faq/index.html index 6dce7d9a..3e9dbb8e 100644 --- a/faq/index.html +++ b/faq/index.html @@ -183,18 +183,6 @@
  • diff --git a/faq_setup/index.html b/faq_setup/index.html index c1656e9e..a90ececf 100644 --- a/faq_setup/index.html +++ b/faq_setup/index.html @@ -183,18 +183,6 @@
  • diff --git a/index.html b/index.html index 7f6dfe7c..1f76ff77 100644 --- a/index.html +++ b/index.html @@ -203,18 +203,6 @@
  • @@ -448,5 +436,5 @@

    Contributing

    diff --git a/search.html b/search.html index 50ac65d8..68578e7a 100644 --- a/search.html +++ b/search.html @@ -174,18 +174,6 @@
  • diff --git a/search/search_index.json b/search/search_index.json index 955a5c29..d8a0113e 100644 --- a/search/search_index.json +++ b/search/search_index.json @@ -1 +1 @@ -{"config":{"indexing":"full","lang":["en"],"min_search_length":3,"prebuild_index":false,"separator":"[\\s\\-]+"},"docs":[{"location":"","text":"VortexaSDK Welcome to Vortexa's Python Software Development Kit (SDK)! We built the SDK to provide fast, interactive, programmatic exploration of our data. The tool lets Data Scientists, Analysts and Developers efficiently explore the world\u2019s waterborne oil movements, and to build custom models & reports with minimum setup cost. The SDK sits as a thin python wrapper around Vortexa's API , giving you immediate access to pandas DataFrames. Example In an interactive Python console, run: >>> from datetime import datetime >>> from vortexasdk import CargoMovements >>> df = CargoMovements()\\ .search(filter_activity='loading_state', filter_time_min=datetime(2017, 8, 2), filter_time_max=datetime(2017, 8, 3))\\ .to_df() returns: quantity vessels.0.name product.group.label product.grade.label events.cargo_port_load_event.0.end_timestamp events.cargo_port_unload_event.0.start_timestamp 0 1998 ALSIA SWAN Clean products Lube Oils 2017-08-01T06:10:45+0000 2017-08-27T14:38:15+0000 1 16559 IVER Dirty products nan 2017-08-02T17:20:51+0000 2017-09-07T07:52:20+0000 2 522288 BLUE SUN Crude Gharib 2017-08-02T04:22:09+0000 2017-08-13T10:32:09+0000 Quick Start Try me out in your browser: Installation $ pip install vortexasdk The SDK requires Python versions between 3.7 and 3.10. See Setup FAQ for more details. To install the SDK on an Apple ARM-based machine, use Python versions between 3.7 and 3.10 and use the latest version of pip. This is supported in the SDK versions 0.41.0 or higher. Authentication Set your VORTEXA_API_KEY environment variable, that's all. Alternatively, the SDK prompts to you enter your API Key when running a script interactively. To get an API key and experiment with Vortexa's data, you can request a demo here . Check Setup To check the SDK is setup correctly, run the following in a bash console: $ python -m vortexasdk.check_setup A successful setup looks like this: Next Steps Learn how to call Endpoints Glossary The Glossary can be found at Vortexa API Documentation . The Glossary outlines key terms, functions and assumptions aimed at helping to extract powerful findings from our data. Documentation Read the documentation at VortexaSDK Docs Contributing We welcome contributions! Please read our Contributing Guide for ways to offer feedback and contributions. Thanks goes to these wonderful contributors ( emoji key ): Kit Burgess \ud83c\udfa8 \ud83d\udcbb tinovs \ud83d\udcbb \ud83d\udc40 David Andrew Starkey \ud83d\udcbb \ud83d\udcd6 \ud83d\udca1 syed \ud83d\udc40 Jakub Korzeniowski \ud83e\udd14 Edward Wright \ud83d\udcd3 Patrick Roddy \ud83d\udcd3 Romain \ud83d\udcd3 \ud83e\udd14 Natday \ud83d\udcbc \ud83e\udd14 \ud83d\udcd3 ArthurD1 \ud83d\udcd3 Chloe Connor \ud83d\udcd3 Achilleas Sfakianakis \ud83d\udcd3 Sean Barry \ud83d\udcbb \ud83d\udcd6 Konrad Moskal \ud83d\udcbb Pawel Pietruszka \ud83d\udcbb This project follows the all-contributors specification. Contributions of any kind welcome!","title":"Home"},{"location":"#vortexasdk","text":"Welcome to Vortexa's Python Software Development Kit (SDK)! We built the SDK to provide fast, interactive, programmatic exploration of our data. 
The tool lets Data Scientists, Analysts and Developers efficiently explore the world\u2019s waterborne oil movements, and to build custom models & reports with minimum setup cost. The SDK sits as a thin python wrapper around Vortexa's API , giving you immediate access to pandas DataFrames.","title":"VortexaSDK"},{"location":"#example","text":"In an interactive Python console, run: >>> from datetime import datetime >>> from vortexasdk import CargoMovements >>> df = CargoMovements()\\ .search(filter_activity='loading_state', filter_time_min=datetime(2017, 8, 2), filter_time_max=datetime(2017, 8, 3))\\ .to_df() returns: quantity vessels.0.name product.group.label product.grade.label events.cargo_port_load_event.0.end_timestamp events.cargo_port_unload_event.0.start_timestamp 0 1998 ALSIA SWAN Clean products Lube Oils 2017-08-01T06:10:45+0000 2017-08-27T14:38:15+0000 1 16559 IVER Dirty products nan 2017-08-02T17:20:51+0000 2017-09-07T07:52:20+0000 2 522288 BLUE SUN Crude Gharib 2017-08-02T04:22:09+0000 2017-08-13T10:32:09+0000","title":"Example"},{"location":"#quick-start","text":"Try me out in your browser:","title":"Quick Start"},{"location":"#installation","text":"$ pip install vortexasdk The SDK requires Python versions between 3.7 and 3.10. See Setup FAQ for more details. To install the SDK on an Apple ARM-based machine, use Python versions between 3.7 and 3.10 and use the latest version of pip. This is supported in the SDK versions 0.41.0 or higher.","title":"Installation"},{"location":"#authentication","text":"Set your VORTEXA_API_KEY environment variable, that's all. Alternatively, the SDK prompts to you enter your API Key when running a script interactively. To get an API key and experiment with Vortexa's data, you can request a demo here .","title":"Authentication"},{"location":"#check-setup","text":"To check the SDK is setup correctly, run the following in a bash console: $ python -m vortexasdk.check_setup A successful setup looks like this:","title":"Check Setup"},{"location":"#next-steps","text":"Learn how to call Endpoints","title":"Next Steps"},{"location":"#glossary","text":"The Glossary can be found at Vortexa API Documentation . The Glossary outlines key terms, functions and assumptions aimed at helping to extract powerful findings from our data.","title":"Glossary"},{"location":"#documentation","text":"Read the documentation at VortexaSDK Docs","title":"Documentation"},{"location":"#contributing","text":"We welcome contributions! Please read our Contributing Guide for ways to offer feedback and contributions. Thanks goes to these wonderful contributors ( emoji key ): Kit Burgess \ud83c\udfa8 \ud83d\udcbb tinovs \ud83d\udcbb \ud83d\udc40 David Andrew Starkey \ud83d\udcbb \ud83d\udcd6 \ud83d\udca1 syed \ud83d\udc40 Jakub Korzeniowski \ud83e\udd14 Edward Wright \ud83d\udcd3 Patrick Roddy \ud83d\udcd3 Romain \ud83d\udcd3 \ud83e\udd14 Natday \ud83d\udcbc \ud83e\udd14 \ud83d\udcd3 ArthurD1 \ud83d\udcd3 Chloe Connor \ud83d\udcd3 Achilleas Sfakianakis \ud83d\udcd3 Sean Barry \ud83d\udcbb \ud83d\udcd6 Konrad Moskal \ud83d\udcbb Pawel Pietruszka \ud83d\udcbb This project follows the all-contributors specification. Contributions of any kind welcome!","title":"Contributing"},{"location":"config/","text":"Config The VortexaSDK can be configured using environment variables. Environment Variable Default Description VORTEXA_API_KEY none API Key used to access the VortexaAPI. Refer to Vortexa API Authentication for more details, including instructions on where to find your API key. 
LOG_FILE none Output log file LOG_LEVEL INFO Configure the level of must be one of [\"DEBUG\", \"INFO\", \"WARNING\", \"ERROR\", \"CRITICAL\"] HTTP_PROXY none Send API requests via a corporate http proxy. This environment variable is used by the requests library, see here for further details HTTPS_PROXY none Send API requests via a corporate https proxy. This environment variable is used by the requests library, see here for further details","title":"Config"},{"location":"config/#config","text":"The VortexaSDK can be configured using environment variables. Environment Variable Default Description VORTEXA_API_KEY none API Key used to access the VortexaAPI. Refer to Vortexa API Authentication for more details, including instructions on where to find your API key. LOG_FILE none Output log file LOG_LEVEL INFO Configure the level of must be one of [\"DEBUG\", \"INFO\", \"WARNING\", \"ERROR\", \"CRITICAL\"] HTTP_PROXY none Send API requests via a corporate http proxy. This environment variable is used by the requests library, see here for further details HTTPS_PROXY none Send API requests via a corporate https proxy. This environment variable is used by the requests library, see here for further details","title":"Config"},{"location":"faq/","text":"Why do my requests hang or run in an infinite loop or I get a RuntimeError (\"An attempt has been made to start a new process...\")? On Windows, it may be that your script doesn't check if __name__ == \"__main__\": before calling the SDK. Your script should contain this check, just like in this example For more details on why if __name__ == \"__main__\": is required, check out those interesting stack overflow posts here and here on Windows multiprocessing. How do I use the SDK with a corporate proxy? To send SDK requests via a proxy, you can set the HTTP_PROXY or HTTPS_PROXY environment variables. More detail is given in the requests library docs here What's the difference between a cargo movement and a vessel movement? A cargo movement is defined as the complete journey of a quantity of oil from its origin terminal to its destination terminal, including all ship to ship (STS) transfers in-between. For example: Tanker X loads 1mn bl of crude from Houston and discharges onto another tanker Y offshore the US Gulf, which then discharges in Singapore. The cargo movement is for 1mn bl of crude oil from Houston to Singapore. The vessel movement for tanker X is Houston to US Gulf, while for tanker Y it is US Gulf to Singapore. When there is no STS transfer, a cargo movement and vessel movement is equivalent. A more detailed explanation can be found here Where can I find a list of products? Check out the Vortexa Glossary, which can be downloaded from here What's the difference in a trading region and a geographic region? Trading regions have been designed by Vortexa to try and group terminals, ports and countries around oil market conventions (e.g. northwest Europe, west Africa, etc) whereas geographic regions are much wider in scope (e.g. North America, Asia, Africa). Trading regions are more granular than geographic regions.","title":"General"},{"location":"faq/#why-do-my-requests-hang-or-run-in-an-infinite-loop-or-i-get-a-runtimeerror-an-attempt-has-been-made-to-start-a-new-process","text":"On Windows, it may be that your script doesn't check if __name__ == \"__main__\": before calling the SDK. 
Your script should contain this check, just like in this example For more details on why if __name__ == \"__main__\": is required, check out those interesting stack overflow posts here and here on Windows multiprocessing.","title":"Why do my requests hang or run in an infinite loop or I get a RuntimeError (\"An attempt has been made to start a new process...\")?"},{"location":"faq/#how-do-i-use-the-sdk-with-a-corporate-proxy","text":"To send SDK requests via a proxy, you can set the HTTP_PROXY or HTTPS_PROXY environment variables. More detail is given in the requests library docs here","title":"How do I use the SDK with a corporate proxy?"},{"location":"faq/#whats-the-difference-between-a-cargo-movement-and-a-vessel-movement","text":"A cargo movement is defined as the complete journey of a quantity of oil from its origin terminal to its destination terminal, including all ship to ship (STS) transfers in-between. For example: Tanker X loads 1mn bl of crude from Houston and discharges onto another tanker Y offshore the US Gulf, which then discharges in Singapore. The cargo movement is for 1mn bl of crude oil from Houston to Singapore. The vessel movement for tanker X is Houston to US Gulf, while for tanker Y it is US Gulf to Singapore. When there is no STS transfer, a cargo movement and vessel movement is equivalent. A more detailed explanation can be found here","title":"What's the difference between a cargo movement and a vessel movement?"},{"location":"faq/#where-can-i-find-a-list-of-products","text":"Check out the Vortexa Glossary, which can be downloaded from here","title":"Where can I find a list of products?"},{"location":"faq/#whats-the-difference-in-a-trading-region-and-a-geographic-region","text":"Trading regions have been designed by Vortexa to try and group terminals, ports and countries around oil market conventions (e.g. northwest Europe, west Africa, etc) whereas geographic regions are much wider in scope (e.g. North America, Asia, Africa). Trading regions are more granular than geographic regions.","title":"What's the difference in a trading region and a geographic region?"},{"location":"faq_setup/","text":"How to install Python on Windows using Anaconda Download the Python3.7 Graphical installer from the anaconda website Follow the conda installation instructions How do I install the SDK on Windows? First, open up an Anaconda Prompt. Hit the start button and type anaconda prompt. Use pip to install the sdk Run pip install --user vortexasdk in the anaconda command prompt You're done! The VortexaSDK has now been installed. How do I install the SDK on Mac / Linux? Type the following into a bash terminal $ pip install vortexasdk How do I add an environment variable on Windows? Hit the windows key, then type \"environment\" to open up a control panel settings page titled \"Edit the system environment variables\" In the System Properties window, click on the Advanced tab, then click the Environment Variables button near the bottom of that tab. Add a new user variable Where is my API Key? Refer to Vortexa API Authentication for details, including instructions on where to find your API key. How do I request an API Key? You can request a demo here . More details are given in docs.vortexa.com . Alternatively, please get in touch at www.vortexa.com . How can I check the SDK is setup correctly? 
Run the following in a bash console on Mac/Linux, or command prompt on Windows: $ python -m vortexasdk.check_setup A successful setup looks like this: On Windows, you'll need to paste the API key by right clicking the console menu, like so: Hitting Ctrl+V won't paste the API key, this is due to a known python windows bug","title":"Setup"},{"location":"faq_setup/#how-to-install-python-on-windows-using-anaconda","text":"Download the Python3.7 Graphical installer from the anaconda website Follow the conda installation instructions","title":"How to install Python on Windows using Anaconda"},{"location":"faq_setup/#how-do-i-install-the-sdk-on-windows","text":"First, open up an Anaconda Prompt. Hit the start button and type anaconda prompt. Use pip to install the sdk Run pip install --user vortexasdk in the anaconda command prompt You're done! The VortexaSDK has now been installed.","title":"How do I install the SDK on Windows?"},{"location":"faq_setup/#how-do-i-install-the-sdk-on-mac-linux","text":"Type the following into a bash terminal $ pip install vortexasdk","title":"How do I install the SDK on Mac / Linux?"},{"location":"faq_setup/#how-do-i-add-an-environment-variable-on-windows","text":"Hit the windows key, then type \"environment\" to open up a control panel settings page titled \"Edit the system environment variables\" In the System Properties window, click on the Advanced tab, then click the Environment Variables button near the bottom of that tab. Add a new user variable","title":"How do I add an environment variable on Windows?"},{"location":"faq_setup/#where-is-my-api-key","text":"Refer to Vortexa API Authentication for details, including instructions on where to find your API key.","title":"Where is my API Key?"},{"location":"faq_setup/#how-do-i-request-an-api-key","text":"You can request a demo here . More details are given in docs.vortexa.com . Alternatively, please get in touch at www.vortexa.com .","title":"How do I request an API Key?"},{"location":"faq_setup/#how-can-i-check-the-sdk-is-setup-correctly","text":"Run the following in a bash console on Mac/Linux, or command prompt on Windows: $ python -m vortexasdk.check_setup A successful setup looks like this: On Windows, you'll need to paste the API key by right clicking the console menu, like so: Hitting Ctrl+V won't paste the API key, this is due to a known python windows bug","title":"How can I check the SDK is setup correctly?"},{"location":"config/config/","text":"Config The VortexaSDK can be configured using environment variables. Environment Variable Default Description VORTEXA_API_KEY none API Key used to access the VortexaAPI. Refer to Vortexa API Authentication for more details, including instructions on where to find your API key. LOG_FILE none Output log file LOG_LEVEL INFO Configure the level of must be one of [\"DEBUG\", \"INFO\", \"WARNING\", \"ERROR\", \"CRITICAL\"] HTTP_PROXY none Send API requests via a corporate http proxy. This environment variable is used by the requests library, see here for further details HTTPS_PROXY none Send API requests via a corporate https proxy. This environment variable is used by the requests library, see here for further details","title":"Config"},{"location":"config/config/#config","text":"The VortexaSDK can be configured using environment variables. Environment Variable Default Description VORTEXA_API_KEY none API Key used to access the VortexaAPI. Refer to Vortexa API Authentication for more details, including instructions on where to find your API key. 
LOG_FILE none Output log file LOG_LEVEL INFO Configure the level of must be one of [\"DEBUG\", \"INFO\", \"WARNING\", \"ERROR\", \"CRITICAL\"] HTTP_PROXY none Send API requests via a corporate http proxy. This environment variable is used by the requests library, see here for further details HTTPS_PROXY none Send API requests via a corporate https proxy. This environment variable is used by the requests library, see here for further details","title":"Config"},{"location":"endpoints/about-endpoints/","text":"About VortexaSDK Endpoints The endpoints module allows you to query Vortexa's data. The VortexaSDK currently contains the following endpoints: Cargo Movements Voyages Charterers Geographies Products Vessels Cargo Time Series EIA Forecasts Tonne-miles Vessel Availability Crude Onshore Inventories Freight Pricing Each endpoint offers either one, or both, of two different functionalities: Lookup by ID . Retrieve an object matching a certain id. In sql speak this is the equivalent of SELECT * FROM vessels WHERE id = 12345; Search . Retrieve a number of objects matching given search parameters. In sql speak this is the equivalent of SELECT * FROM vessels WHERE name ~* 'ocean' AND vessel_class = 'vlcc'; Let's explain with some examples: Find all aframax vessels from vortexasdk import Vessels df = Vessels().search(vessel_classes='aframax').to_df() Find the vessel that has with id 12345 vessel = Vessels().reference(id='12345')","title":"About Endpoints"},{"location":"endpoints/about-endpoints/#about-vortexasdk-endpoints","text":"The endpoints module allows you to query Vortexa's data. The VortexaSDK currently contains the following endpoints: Cargo Movements Voyages Charterers Geographies Products Vessels Cargo Time Series EIA Forecasts Tonne-miles Vessel Availability Crude Onshore Inventories Freight Pricing Each endpoint offers either one, or both, of two different functionalities: Lookup by ID . Retrieve an object matching a certain id. In sql speak this is the equivalent of SELECT * FROM vessels WHERE id = 12345; Search . Retrieve a number of objects matching given search parameters. In sql speak this is the equivalent of SELECT * FROM vessels WHERE name ~* 'ocean' AND vessel_class = 'vlcc'; Let's explain with some examples: Find all aframax vessels from vortexasdk import Vessels df = Vessels().search(vessel_classes='aframax').to_df() Find the vessel that has with id 12345 vessel = Vessels().reference(id='12345')","title":"About VortexaSDK Endpoints"},{"location":"endpoints/asset_tanks/","text":"vortexasdk.endpoints.asset_tanks Try me out in your browser: AssetTanks AssetTanks(self) Asset Tanks endpoint. An Asset Tank is a reference value that corresponds to an ID associated with other entities. For example, an Asset Tank object may have the following keys: { \"name\": \"AAM001\", \"storage_type\": \"tdb\" \"crude_confidence\": \"confirmed\" ... } IDs represent asset tanks which can be found via the Asset Tank reference endpoint. When the asset tanks endpoint is searched with those ids as parameters: >>> from vortexasdk import AssetTanks >>> df = AssetTanks().search(ids=[\"6114b93026e61993797db33a46a5d2acbeacdbd63238a4271efaeafcee94b1d2\"]).to_df() Returns id capacity_bbl crude_confidence location_id name storage_type lat lon 0 6114b93026e61993797d... 645201 confirmed b839dc5fee39ff7efd5e1cf2494... AAM001 tbd 90 180 load_all AssetTanks.load_all(self) -> vortexasdk.endpoints.asset_tanks_result.AssetTankResult Load all asset tanks. 
search AssetTanks.search(self, ids: Union[str, List[str]] = None, corporate_entity_ids: Union[str, List[str]] = None, crude_confidence: List[str] = None, location_ids: Union[str, List[str]] = None, storage_type: List[str] = None, term: Union[str, List[str]] = None) -> vortexasdk.endpoints.asset_tanks_result.AssetTankResult Find all asset tanks matching given type. Arguments ids : An array of unique Asset Tanks ID(s) to filter on. corporate_entity_ids : An array of owner ID(s) to filter on. crude_confidence : An array of confidence metrics to filter on. Possible values are: 'confirmed\u2019 , \u2018probable\u2019 , \u2018unlikely\u2019 location_ids : An array of geography ID(s) to filter on. storage_types : An array of storage types to filter on. Possible values are: 'refinery' , 'non-refinery' , 'commercial' , 'spr' , 'tbd' Returns List of asset tanks matching type Examples Find all asset tanks with a storage_type of refinery . >>> from vortexasdk import AssetTanks >>> df = AssetTanks().search(storage_type=[\"refinery\"]).to_df() Returns id capacity_bbl crude_confidence location_id name storage_type lat lon 0 0a736a1816c0fea49a88... 104815 probable f726416f49adcac6d5d296c49a00... HOM009 refinery -60 24 1 b96adfb025a719b66927... 139279 unlikely f726416f49adcac6d5d296c49a00... HOM022 refinery 100 -90 vortexasdk.endpoints.asset_tanks_result AssetTankResult AssetTankResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the Asset Tanks endpoint. to_list AssetTankResult.to_list(self) -> List[vortexasdk.api.asset_tank.AssetTank] Represent asset tanks as a list. to_df AssetTankResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent asset tanks as a pd.DataFrame . Arguments columns : The asset tanks features we want in the dataframe. Enter columns='all' to include all features. Defaults to columns = ['id', 'capacity_bbl', 'crude_confidence', 'location_id', 'name', 'storage_type', 'lat', 'lon'] . Returns pd.DataFrame of asset tanks.","title":"Asset Tanks"},{"location":"endpoints/attributes/","text":"vortexasdk.endpoints.attributes Try me out in your browser: Attributes Attributes(self) Attributes endpoint. An Attribute is a reference value that corresponds to an ID associated with other entities. For example, a vessel object from the Vessel reference endpoint may have the following keys: { \"ice_class\": \"b09ed4e2bd6904dd\", \"propulsion\": \"3ace0e050724707b\" } These IDs represent attributes which can be found via the Attributes reference endpoint. When the attributes endpoint is searched with those ids as parameters: >>> from vortexasdk import Attributes >>> df = Attributes().search(ids=[\"b09ed4e2bd6904dd\", \"3ace0e050724707b\"]).to_df() Returns id type label 0 b09ed4e2bd6904dd ice_class UNKNOWN 1 3ace0e050724707b propulsion DFDE load_all Attributes.load_all(self) -> vortexasdk.endpoints.attributes_result.AttributeResult Load all attributes. search Attributes.search(self, type: str = None, term: Union[str, List[str]] = None, ids: Union[str, List[str]] = None) -> vortexasdk.endpoints.attributes_result.AttributeResult Find all attributes matching given type. Arguments type : The type of attribute we're filtering on. Type can be: ice_class , propulsion , scrubber Returns List of attributes matching type Examples Find all attributes with a type of ice_class . 
>>> from vortexasdk import Attributes >>> df = Attributes().search(type=\"scrubber\").to_df() returns id name type 0 14c7b073809eb565 Open Loop scrubber 1 478fca39000c49d6 Unknown scrubber vortexasdk.endpoints.attributes_result AttributeResult AttributeResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the Attributes endpoint. to_list AttributeResult.to_list(self) -> List[vortexasdk.api.attribute.Attribute] Represent attributes as a list. to_df AttributeResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent attributes as a pd.DataFrame . Arguments columns : The attributes features we want in the dataframe. Enter columns='all' to include all features. Defaults to columns = ['id', 'name', 'type'] . Returns pd.DataFrame of attributes.","title":"Attributes"},{"location":"endpoints/cargo_movements/","text":"vortexasdk.endpoints.cargo_movements Try me out in your browser: CargoMovements CargoMovements(self) Cargo Movements Endpoint, use this to search through Vortexa's cargo movements. A detailed explanation of Cargo/Vessel Movements can be found here . search CargoMovements.search(self, filter_activity: str = None, filter_time_min: datetime.datetime = datetime.datetime(2023, 9, 7, 10, 23, 46, 776785), filter_time_max: datetime.datetime = datetime.datetime(2023, 9, 7, 10, 23, 46, 776788), cm_unit: str = 'b', filter_charterers: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_products: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_storage_locations: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_flags: Union[str, List[str]] = None, filter_vessel_ice_class: Union[str, List[str]] = None, filter_vessel_propulsion: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, exclude_vessel_ice_class: Union[str, List[str]] = None, exclude_vessel_propulsion: Union[str, List[str]] = None, disable_geographic_exclusion_rules: bool = None) -> vortexasdk.endpoints.cargo_movements_result.CargoMovementsResult Find CargoMovements matching the given search parameters. Arguments filter_activity : Movement activity on which to base the time filter. Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'unloaded_state', 'storing_state', 'storing_start', 'storing_end', 'transiting_state', 'any_activity', 'oil_on_water_state']. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. cm_unit : Unit of measurement. Enter 'b' for barrels or 't' for tonnes. 
filter_charterers : A charterer ID, or list of charterer IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_owners : An owner ID, or list of owner IDs to filter on. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_storage_locations : A geography ID, or list of geography IDs to filter on. filter_ship_to_ship_locations : A geography ID, or list of geography IDs to filter on. filter_waypoints : A geography ID, or list of geography IDs to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_vessel_flags : A vessel flag, or list of vessel flags to filter on. filter_vessel_ice_class : An attribute ID, or list of attribute IDs to filter on. filter_vessel_propulsion : An attribute ID, or list of attribute IDs to filter on. exclude_origins : A geography ID, or list of geography IDs to exclude. exclude_destinations : A geography ID, or list of geography IDs to exclude. exclude_products : A product ID, or list of product IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_charterers : A charterer ID, or list of charterer IDs to exclude. exclude_owners : An owner ID, or list of owner IDs to exclude. exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_flags : A vessel flag, or list of vessel flags to exclude. exclude_vessel_ice_class : An attribute ID, or list of attribute IDs to exclude. exclude_vessel_propulsion : An attribute ID, or list of attribute IDs to exclude. disable_geographic_exclusion_rules : This controls a popular industry term \"intra-movements\" and determines the filter behaviour for cargo leaving then entering the same geographic area. Returns CargoMovementsResult , containing all the cargo movements matching the given search terms. Example Which cargoes were loaded from Rotterdam on the morning of 1st December 2018? >>> from vortexasdk import CargoMovements, Geographies >>> rotterdam = [g.id for g in Geographies().search(\"rotterdam\").to_list() if \"port\" in g.layer] >>> search_result = CargoMovements().search( ... filter_origins=rotterdam, ... filter_activity='loading_state', ... filter_time_min=datetime(2018, 12, 1), ... filter_time_max=datetime(2018, 12, 1, 12)) >>> df = search_result.to_df(columns=['product.grade.label', 'product.group.label', 'vessels.0.vessel_class']) product.group.label product.grade.label vessels.0.vessel_class 0 Clean products Pygas general_purpose 1 Clean products Chemicals tiny_tanker 2 Clean products Chemicals tiny_tanker 3 Dirty products Low Sulphur VGO (LSVGO) general_purpose 4 Clean products ULSD (Ultra Low Sulphur Diesel) general_purpose 5 Clean products Chemicals tiny_tanker 6 Clean products Finished Gasoline handymax
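To make the exclude filters described above concrete, here is a minimal, untested sketch building on the Rotterdam example: it re-derives the rotterdam ID list and then drops movements that also discharge in Rotterdam, which is one way of screening out \"intra-movements\"; the dates and column choice are illustrative only.
>>> from datetime import datetime
>>> from vortexasdk import CargoMovements, Geographies
>>> rotterdam = [g.id for g in Geographies().search(\"rotterdam\").to_list() if \"port\" in g.layer]
>>> # same loading window as above, but exclude movements whose destination is also Rotterdam
>>> search_result = CargoMovements().search(
...     filter_origins=rotterdam,
...     exclude_destinations=rotterdam,
...     filter_activity='loading_state',
...     filter_time_min=datetime(2018, 12, 1),
...     filter_time_max=datetime(2018, 12, 1, 12))
>>> df = search_result.to_df(columns=['vessels.0.name', 'product.group.label', 'quantity'])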
Which VLCC cargoes passed through the Suez canal en route to China? Note here we include vessels.0..., vessels.1..., vessels.2... columns. This lets us view all vessels present in any STS operations. >>> from vortexasdk import CargoMovements, Geographies, Vessels >>> suez = [g.id for g in Geographies().search(\"suez\").to_list()] >>> china = [g.id for g in Geographies().search(\"china\").to_list() if \"country\" in g.layer] >>> vlccs = [v.id for v in Vessels().search(vessel_classes=\"vlcc_plus\").to_list()] >>> cargo_movement_search_result = CargoMovements().search( ... filter_destinations=china, ... filter_activity=\"loading_state\", ... filter_waypoints=suez, ... filter_vessels=vlccs, ... filter_time_min=datetime(2018, 12, 1), ... filter_time_max=datetime(2018, 12, 1)) >>> cols = ['vessels.0.name', 'vessels.0.vessel_class', 'vessels.1.name', 'vessels.1.vessel_class', 'vessels.2.name', 'vessels.2.vessel_class', 'product.group.label', 'quantity'] >>> cargo_movements_df = cargo_movement_search_result.to_df(columns=cols) vessels.0.name vessels.0.vessel_class vessels.1.name vessels.1.vessel_class vessels.2.name vessels.2.vessel_class product.group.label quantity 0 MINERVA MARINA suezmax COSGLORY LAKE vlcc_plus nan nan Crude 700614 1 BUKHA vlcc_plus nan nan nan nan Crude 1896374 2 ATHENIAN FREEDOM vlcc_plus nan nan nan nan Crude 183537 3 ATINA suezmax DONAT suezmax DS VISION vlcc_plus Crude 896773 4 MINERVA MARINA suezmax COSGLORY LAKE vlcc_plus nan nan Crude 405724 5 MASAL suezmax EKTA vlcc_plus nan nan Crude 997896 6 ATHENIAN FREEDOM vlcc_plus nan nan nan nan Crude 120812 Cargo Movements Endpoint Further Documentation record CargoMovements.record(self, id: str, params: Dict = {}) -> Dict Perform a cargo movement lookup. Arguments id : Cargo movement ID to lookup (long_id or short_id) params : Supported search params: 'unit' : enter 'b' for barrels, 't' for tonnes and 'cbm' for cubic meters Returns Cargo movement record matching the ID Further Documentation: VortexaAPI Cargo Movement
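If you already know a cargo movement's ID, for example from a previous search, the record method described above returns the full nested record as a plain dictionary. The snippet below is an untested sketch: the ID shown is a placeholder rather than a real cargo movement ID, and 't' is one of the documented unit values.
>>> from vortexasdk import CargoMovements
>>> # replace the placeholder with a real long_id or short_id taken from a search result
>>> movement = CargoMovements().record(
...     id=\"insert_cargo_movement_id_here\",
...     params={\"unit\": \"t\"})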
vortexasdk.endpoints.cargo_movements_result CargoMovementsResult CargoMovementsResult(__pydantic_self__, **data: Any) -> None Container class holding search results returned from the cargo movements endpoint. This class has two methods, to_list() and to_df() , allowing search results to be represented as a list of CargoMovements , or as a pd.DataFrame , respectively. to_list CargoMovementsResult.to_list(self) -> List[vortexasdk.api.cargo_movement.CargoMovement] Represent cargo movements as a list of CargoMovementEntity s. to_df CargoMovementsResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent cargo movements as a pd.DataFrame . Arguments columns : Output columns present in the pd.DataFrame . Enter columns='all' to return all available columns. Enter columns=None to use cargo_movements.DEFAULT_COLUMNS . Returns pd.DataFrame , one row per cargo movement. Notes A cargo movement is a complicated, nested structure. Between its point of loading and discharge, a cargo movement may be carried by N or more vessels, with N-1 associated STS events. Each of these N vessels could have an associated effective controller, charterer, time charterer... etc. In order to represent a cargo movement as a flat (not nested) record in a dataframe, the SDK flattens the cargo movement, generating many columns in the process. The columns are logically named. Let's say that a cargo is transferred between 4 vessels en route from a load in Rotterdam to a discharge in New York. This is represented as 1 cargo_port_load_event , followed by 3 cargo_sts_event s, and finally 1 cargo_port_unload_event . In this example, the name of the 1st vessel is found in the vessels.0.name column (we're using zero-based indexing). Likewise, the imo of the second vessel is found in the vessels.1.imo column. To find the name of the country in which the second STS event occurred, we'd use the events.cargo_sts_event.1.location.country.label column. Similarly, to find out when the first vessel started loading the cargo from Rotterdam, we'd use the events.cargo_port_load_event.0.start_timestamp column. By default, the columns returned are something along the lines of: DEFAULT_COLUMNS = [ 'events.cargo_port_load_event.0.location.port.label', 'events.cargo_port_unload_event.0.location.port.label', 'product.group.label', 'product.grade.label', 'quantity', 'vessels.0.name', 'events.cargo_port_load_event.0.end_timestamp', 'events.cargo_port_unload_event.0.start_timestamp', ] The exact default columns used can be found at cargo_movements.DEFAULT_COLUMNS A near-complete list of columns is given below [ 'cargo_movement_id', 'events.cargo_fso_load_event.0.end_timestamp', 'events.cargo_fso_load_event.0.event_type', 'events.cargo_fso_load_event.0.fso_vessel_id', 'events.cargo_fso_load_event.0.fso_vessel_name', 'events.cargo_fso_load_event.0.location.country.id', 'events.cargo_fso_load_event.0.location.country.label', 'events.cargo_fso_load_event.0.location.country.layer', 'events.cargo_fso_load_event.0.location.country.probability', 'events.cargo_fso_load_event.0.location.country.source', 'events.cargo_fso_load_event.0.location.region.id', 'events.cargo_fso_load_event.0.location.region.label', 'events.cargo_fso_load_event.0.location.region.layer', 'events.cargo_fso_load_event.0.location.region.probability', 'events.cargo_fso_load_event.0.location.region.source', 'events.cargo_fso_load_event.0.location.shipping_region.id', 'events.cargo_fso_load_event.0.location.shipping_region.label', 'events.cargo_fso_load_event.0.location.shipping_region.layer', 'events.cargo_fso_load_event.0.location.shipping_region.probability', 'events.cargo_fso_load_event.0.location.shipping_region.source', 'events.cargo_fso_load_event.0.location.sts_zone.id', 'events.cargo_fso_load_event.0.location.sts_zone.label', 'events.cargo_fso_load_event.0.location.sts_zone.layer', 'events.cargo_fso_load_event.0.location.sts_zone.probability', 'events.cargo_fso_load_event.0.location.sts_zone.source', 'events.cargo_fso_load_event.0.location.trading_block.id', 'events.cargo_fso_load_event.0.location.trading_block.label', 'events.cargo_fso_load_event.0.location.trading_block.layer', 'events.cargo_fso_load_event.0.location.trading_block.probability', 'events.cargo_fso_load_event.0.location.trading_block.source', 'events.cargo_fso_load_event.0.location.trading_region.id', 'events.cargo_fso_load_event.0.location.trading_region.label', 'events.cargo_fso_load_event.0.location.trading_region.layer', 'events.cargo_fso_load_event.0.location.trading_region.probability', 'events.cargo_fso_load_event.0.location.trading_region.source', 'events.cargo_fso_load_event.0.location.trading_subregion.id', 'events.cargo_fso_load_event.0.location.trading_subregion.label', 'events.cargo_fso_load_event.0.location.trading_subregion.layer', 'events.cargo_fso_load_event.0.location.trading_subregion.probability', 'events.cargo_fso_load_event.0.location.trading_subregion.source', 'events.cargo_fso_load_event.0.pos.0', 'events.cargo_fso_load_event.0.pos.1', 'events.cargo_fso_load_event.0.probability', 'events.cargo_fso_load_event.0.start_timestamp',
'events.cargo_fso_load_event.0.to_vessel_id', 'events.cargo_fso_load_event.0.to_vessel_name', 'events.cargo_fso_unload_event.0.end_timestamp', 'events.cargo_fso_unload_event.0.event_type', 'events.cargo_fso_unload_event.0.from_vessel_id', 'events.cargo_fso_unload_event.0.from_vessel_name', 'events.cargo_fso_unload_event.0.fso_vessel_id', 'events.cargo_fso_unload_event.0.fso_vessel_name', 'events.cargo_fso_unload_event.0.location.country.id', 'events.cargo_fso_unload_event.0.location.country.label', 'events.cargo_fso_unload_event.0.location.country.layer', 'events.cargo_fso_unload_event.0.location.country.probability', 'events.cargo_fso_unload_event.0.location.country.source', 'events.cargo_fso_unload_event.0.location.region.id', 'events.cargo_fso_unload_event.0.location.region.label', 'events.cargo_fso_unload_event.0.location.region.layer', 'events.cargo_fso_unload_event.0.location.region.probability', 'events.cargo_fso_unload_event.0.location.region.source', 'events.cargo_fso_unload_event.0.location.shipping_region.id', 'events.cargo_fso_unload_event.0.location.shipping_region.label', 'events.cargo_fso_unload_event.0.location.shipping_region.layer', 'events.cargo_fso_unload_event.0.location.shipping_region.probability', 'events.cargo_fso_unload_event.0.location.shipping_region.source', 'events.cargo_fso_unload_event.0.location.sts_zone.id', 'events.cargo_fso_unload_event.0.location.sts_zone.label', 'events.cargo_fso_unload_event.0.location.sts_zone.layer', 'events.cargo_fso_unload_event.0.location.sts_zone.probability', 'events.cargo_fso_unload_event.0.location.sts_zone.source', 'events.cargo_fso_unload_event.0.location.trading_block.id', 'events.cargo_fso_unload_event.0.location.trading_block.label', 'events.cargo_fso_unload_event.0.location.trading_block.layer', 'events.cargo_fso_unload_event.0.location.trading_block.probability', 'events.cargo_fso_unload_event.0.location.trading_block.source', 'events.cargo_fso_unload_event.0.location.trading_region.id', 'events.cargo_fso_unload_event.0.location.trading_region.label', 'events.cargo_fso_unload_event.0.location.trading_region.layer', 'events.cargo_fso_unload_event.0.location.trading_region.probability', 'events.cargo_fso_unload_event.0.location.trading_region.source', 'events.cargo_fso_unload_event.0.location.trading_subregion.id', 'events.cargo_fso_unload_event.0.location.trading_subregion.label', 'events.cargo_fso_unload_event.0.location.trading_subregion.layer', 'events.cargo_fso_unload_event.0.location.trading_subregion.probability', 'events.cargo_fso_unload_event.0.location.trading_subregion.source', 'events.cargo_fso_unload_event.0.pos.0', 'events.cargo_fso_unload_event.0.pos.1', 'events.cargo_fso_unload_event.0.probability', 'events.cargo_fso_unload_event.0.start_timestamp', 'events.cargo_port_load_event.0.end_timestamp', 'events.cargo_port_load_event.0.event_type', 'events.cargo_port_load_event.0.location.country.id', 'events.cargo_port_load_event.0.location.country.label', 'events.cargo_port_load_event.0.location.country.layer', 'events.cargo_port_load_event.0.location.country.probability', 'events.cargo_port_load_event.0.location.country.source', 'events.cargo_port_load_event.0.location.port.id', 'events.cargo_port_load_event.0.location.port.label', 'events.cargo_port_load_event.0.location.port.layer', 'events.cargo_port_load_event.0.location.port.probability', 'events.cargo_port_load_event.0.location.port.source', 'events.cargo_port_load_event.0.location.region.id', 'events.cargo_port_load_event.0.location.region.label', 
'events.cargo_port_load_event.0.location.region.layer', 'events.cargo_port_load_event.0.location.region.probability', 'events.cargo_port_load_event.0.location.region.source', 'events.cargo_port_load_event.0.location.shipping_region.id', 'events.cargo_port_load_event.0.location.shipping_region.label', 'events.cargo_port_load_event.0.location.shipping_region.layer', 'events.cargo_port_load_event.0.location.shipping_region.probability', 'events.cargo_port_load_event.0.location.shipping_region.source', 'events.cargo_port_load_event.0.location.terminal.id', 'events.cargo_port_load_event.0.location.terminal.label', 'events.cargo_port_load_event.0.location.terminal.layer', 'events.cargo_port_load_event.0.location.terminal.probability', 'events.cargo_port_load_event.0.location.terminal.source', 'events.cargo_port_load_event.0.location.trading_block.id', 'events.cargo_port_load_event.0.location.trading_block.label', 'events.cargo_port_load_event.0.location.trading_block.layer', 'events.cargo_port_load_event.0.location.trading_block.probability', 'events.cargo_port_load_event.0.location.trading_block.source', 'events.cargo_port_load_event.0.location.trading_region.id', 'events.cargo_port_load_event.0.location.trading_region.label', 'events.cargo_port_load_event.0.location.trading_region.layer', 'events.cargo_port_load_event.0.location.trading_region.probability', 'events.cargo_port_load_event.0.location.trading_region.source', 'events.cargo_port_load_event.0.location.trading_subregion.id', 'events.cargo_port_load_event.0.location.trading_subregion.label', 'events.cargo_port_load_event.0.location.trading_subregion.layer', 'events.cargo_port_load_event.0.location.trading_subregion.probability', 'events.cargo_port_load_event.0.location.trading_subregion.source', 'events.cargo_port_load_event.0.pos.0', 'events.cargo_port_load_event.0.pos.1', 'events.cargo_port_load_event.0.probability', 'events.cargo_port_load_event.0.start_timestamp', 'events.cargo_port_unload_event.0.end_timestamp', 'events.cargo_port_unload_event.0.event_type', 'events.cargo_port_unload_event.0.location.country.id', 'events.cargo_port_unload_event.0.location.country.label', 'events.cargo_port_unload_event.0.location.country.layer', 'events.cargo_port_unload_event.0.location.country.probability', 'events.cargo_port_unload_event.0.location.country.source', 'events.cargo_port_unload_event.0.location.port.id', 'events.cargo_port_unload_event.0.location.port.label', 'events.cargo_port_unload_event.0.location.port.layer', 'events.cargo_port_unload_event.0.location.port.probability', 'events.cargo_port_unload_event.0.location.port.source', 'events.cargo_port_unload_event.0.location.region.id', 'events.cargo_port_unload_event.0.location.region.label', 'events.cargo_port_unload_event.0.location.region.layer', 'events.cargo_port_unload_event.0.location.region.probability', 'events.cargo_port_unload_event.0.location.region.source', 'events.cargo_port_unload_event.0.location.shipping_region.id', 'events.cargo_port_unload_event.0.location.shipping_region.label', 'events.cargo_port_unload_event.0.location.shipping_region.layer', 'events.cargo_port_unload_event.0.location.shipping_region.probability', 'events.cargo_port_unload_event.0.location.shipping_region.source', 'events.cargo_port_unload_event.0.location.sts_zone.id', 'events.cargo_port_unload_event.0.location.sts_zone.label', 'events.cargo_port_unload_event.0.location.sts_zone.layer', 'events.cargo_port_unload_event.0.location.sts_zone.probability', 
'events.cargo_port_unload_event.0.location.sts_zone.source', 'events.cargo_port_unload_event.0.location.terminal.id', 'events.cargo_port_unload_event.0.location.terminal.label', 'events.cargo_port_unload_event.0.location.terminal.layer', 'events.cargo_port_unload_event.0.location.terminal.probability', 'events.cargo_port_unload_event.0.location.terminal.source', 'events.cargo_port_unload_event.0.location.trading_block.id', 'events.cargo_port_unload_event.0.location.trading_block.label', 'events.cargo_port_unload_event.0.location.trading_block.layer', 'events.cargo_port_unload_event.0.location.trading_block.probability', 'events.cargo_port_unload_event.0.location.trading_block.source', 'events.cargo_port_unload_event.0.location.trading_region.id', 'events.cargo_port_unload_event.0.location.trading_region.label', 'events.cargo_port_unload_event.0.location.trading_region.layer', 'events.cargo_port_unload_event.0.location.trading_region.probability', 'events.cargo_port_unload_event.0.location.trading_region.source', 'events.cargo_port_unload_event.0.location.trading_subregion.id', 'events.cargo_port_unload_event.0.location.trading_subregion.label', 'events.cargo_port_unload_event.0.location.trading_subregion.layer', 'events.cargo_port_unload_event.0.location.trading_subregion.probability', 'events.cargo_port_unload_event.0.location.trading_subregion.source', 'events.cargo_port_unload_event.0.pos.0', 'events.cargo_port_unload_event.0.pos.1', 'events.cargo_port_unload_event.0.probability', 'events.cargo_port_unload_event.0.start_timestamp', 'events.cargo_storage_event.0.end_timestamp', 'events.cargo_storage_event.0.event_type', 'events.cargo_storage_event.0.location.country.id', 'events.cargo_storage_event.0.location.country.label', 'events.cargo_storage_event.0.location.country.layer', 'events.cargo_storage_event.0.location.country.probability', 'events.cargo_storage_event.0.location.country.source', 'events.cargo_storage_event.0.location.region.id', 'events.cargo_storage_event.0.location.region.label', 'events.cargo_storage_event.0.location.region.layer', 'events.cargo_storage_event.0.location.region.probability', 'events.cargo_storage_event.0.location.region.source', 'events.cargo_storage_event.0.location.shipping_region.id', 'events.cargo_storage_event.0.location.shipping_region.label', 'events.cargo_storage_event.0.location.shipping_region.layer', 'events.cargo_storage_event.0.location.shipping_region.probability', 'events.cargo_storage_event.0.location.shipping_region.source', 'events.cargo_storage_event.0.location.trading_block.id', 'events.cargo_storage_event.0.location.trading_block.label', 'events.cargo_storage_event.0.location.trading_block.layer', 'events.cargo_storage_event.0.location.trading_block.probability', 'events.cargo_storage_event.0.location.trading_block.source', 'events.cargo_storage_event.0.location.trading_region.id', 'events.cargo_storage_event.0.location.trading_region.label', 'events.cargo_storage_event.0.location.trading_region.layer', 'events.cargo_storage_event.0.location.trading_region.probability', 'events.cargo_storage_event.0.location.trading_region.source', 'events.cargo_storage_event.0.location.trading_subregion.id', 'events.cargo_storage_event.0.location.trading_subregion.label', 'events.cargo_storage_event.0.location.trading_subregion.layer', 'events.cargo_storage_event.0.location.trading_subregion.probability', 'events.cargo_storage_event.0.location.trading_subregion.source', 'events.cargo_storage_event.0.pos.0', 'events.cargo_storage_event.0.pos.1', 
'events.cargo_storage_event.0.start_timestamp', 'events.cargo_storage_event.0.vessel_id', 'events.cargo_sts_event.0.end_timestamp', 'events.cargo_sts_event.0.event_type', 'events.cargo_sts_event.0.from_vessel_id', 'events.cargo_sts_event.0.from_vessel_name', 'events.cargo_sts_event.0.location.country.id', 'events.cargo_sts_event.0.location.country.label', 'events.cargo_sts_event.0.location.country.layer', 'events.cargo_sts_event.0.location.country.probability', 'events.cargo_sts_event.0.location.country.source', 'events.cargo_sts_event.0.location.port.id', 'events.cargo_sts_event.0.location.port.label', 'events.cargo_sts_event.0.location.port.layer', 'events.cargo_sts_event.0.location.port.probability', 'events.cargo_sts_event.0.location.port.source', 'events.cargo_sts_event.0.location.region.id', 'events.cargo_sts_event.0.location.region.label', 'events.cargo_sts_event.0.location.region.layer', 'events.cargo_sts_event.0.location.region.probability', 'events.cargo_sts_event.0.location.region.source', 'events.cargo_sts_event.0.location.shipping_region.id', 'events.cargo_sts_event.0.location.shipping_region.label', 'events.cargo_sts_event.0.location.shipping_region.layer', 'events.cargo_sts_event.0.location.shipping_region.probability', 'events.cargo_sts_event.0.location.shipping_region.source', 'events.cargo_sts_event.0.location.sts_zone.id', 'events.cargo_sts_event.0.location.sts_zone.label', 'events.cargo_sts_event.0.location.sts_zone.layer', 'events.cargo_sts_event.0.location.sts_zone.probability', 'events.cargo_sts_event.0.location.sts_zone.source', 'events.cargo_sts_event.0.location.trading_block.id', 'events.cargo_sts_event.0.location.trading_block.label', 'events.cargo_sts_event.0.location.trading_block.layer', 'events.cargo_sts_event.0.location.trading_block.probability', 'events.cargo_sts_event.0.location.trading_block.source', 'events.cargo_sts_event.0.location.trading_region.id', 'events.cargo_sts_event.0.location.trading_region.label', 'events.cargo_sts_event.0.location.trading_region.layer', 'events.cargo_sts_event.0.location.trading_region.probability', 'events.cargo_sts_event.0.location.trading_region.source', 'events.cargo_sts_event.0.location.trading_subregion.id', 'events.cargo_sts_event.0.location.trading_subregion.label', 'events.cargo_sts_event.0.location.trading_subregion.layer', 'events.cargo_sts_event.0.location.trading_subregion.probability', 'events.cargo_sts_event.0.location.trading_subregion.source', 'events.cargo_sts_event.0.pos.0', 'events.cargo_sts_event.0.pos.1', 'events.cargo_sts_event.0.start_timestamp', 'events.cargo_sts_event.0.to_vessel_id', 'events.cargo_sts_event.0.to_vessel_name', 'events.cargo_sts_event.1.end_timestamp', 'events.cargo_sts_event.1.event_type', 'events.cargo_sts_event.1.from_vessel_id', 'events.cargo_sts_event.1.from_vessel_name', 'events.cargo_sts_event.1.location.country.id', 'events.cargo_sts_event.1.location.country.label', 'events.cargo_sts_event.1.location.country.layer', 'events.cargo_sts_event.1.location.country.probability', 'events.cargo_sts_event.1.location.country.source', 'events.cargo_sts_event.1.location.region.id', 'events.cargo_sts_event.1.location.region.label', 'events.cargo_sts_event.1.location.region.layer', 'events.cargo_sts_event.1.location.region.probability', 'events.cargo_sts_event.1.location.region.source', 'events.cargo_sts_event.1.location.shipping_region.id', 'events.cargo_sts_event.1.location.shipping_region.label', 'events.cargo_sts_event.1.location.shipping_region.layer', 
'events.cargo_sts_event.1.location.shipping_region.probability', 'events.cargo_sts_event.1.location.shipping_region.source', 'events.cargo_sts_event.1.location.sts_zone.id', 'events.cargo_sts_event.1.location.sts_zone.label', 'events.cargo_sts_event.1.location.sts_zone.layer', 'events.cargo_sts_event.1.location.sts_zone.probability', 'events.cargo_sts_event.1.location.sts_zone.source', 'events.cargo_sts_event.1.location.trading_block.id', 'events.cargo_sts_event.1.location.trading_block.label', 'events.cargo_sts_event.1.location.trading_block.layer', 'events.cargo_sts_event.1.location.trading_block.probability', 'events.cargo_sts_event.1.location.trading_block.source', 'events.cargo_sts_event.1.location.trading_region.id', 'events.cargo_sts_event.1.location.trading_region.label', 'events.cargo_sts_event.1.location.trading_region.layer', 'events.cargo_sts_event.1.location.trading_region.probability', 'events.cargo_sts_event.1.location.trading_region.source', 'events.cargo_sts_event.1.location.trading_subregion.id', 'events.cargo_sts_event.1.location.trading_subregion.label', 'events.cargo_sts_event.1.location.trading_subregion.layer', 'events.cargo_sts_event.1.location.trading_subregion.probability', 'events.cargo_sts_event.1.location.trading_subregion.source', 'events.cargo_sts_event.1.pos.0', 'events.cargo_sts_event.1.pos.1', 'events.cargo_sts_event.1.start_timestamp', 'events.cargo_sts_event.1.to_vessel_id', 'events.cargo_sts_event.1.to_vessel_name', 'product.category.id', 'product.category.label', 'product.category.layer', 'product.category.probability', 'product.category.source', 'product.grade.id', 'product.grade.label', 'product.grade.layer', 'product.grade.probability', 'product.grade.source', 'product.group.id', 'product.group.label', 'product.group.layer', 'product.group.probability', 'product.group.source', 'product.group_product.id', 'product.group_product.label', 'product.group_product.layer', 'product.group_product.probability', 'product.group_product.source', 'quantity', 'status', 'vessels.0.corporate_entities.charterer.id', 'vessels.0.corporate_entities.charterer.label', 'vessels.0.corporate_entities.charterer.layer', 'vessels.0.corporate_entities.charterer.probability', 'vessels.0.corporate_entities.charterer.source', 'vessels.0.corporate_entities.effective_controller.id', 'vessels.0.corporate_entities.effective_controller.label', 'vessels.0.corporate_entities.effective_controller.layer', 'vessels.0.corporate_entities.effective_controller.probability', 'vessels.0.corporate_entities.effective_controller.source', 'vessels.0.corporate_entities.time_charterer.end_timestamp', 'vessels.0.corporate_entities.time_charterer.id', 'vessels.0.corporate_entities.time_charterer.label', 'vessels.0.corporate_entities.time_charterer.layer', 'vessels.0.corporate_entities.time_charterer.probability', 'vessels.0.corporate_entities.time_charterer.source', 'vessels.0.corporate_entities.time_charterer.start_timestamp', 'vessels.0.cubic_capacity', 'vessels.0.dwt', 'vessels.0.end_timestamp', 'vessels.0.fixture_fulfilled', 'vessels.0.fixture_id', 'vessels.0.id', 'vessels.0.imo', 'vessels.0.mmsi', 'vessels.0.name', 'vessels.0.start_timestamp', 'vessels.0.status', 'vessels.0.tags.0.end_timestamp', 'vessels.0.tags.0.start_timestamp', 'vessels.0.tags.0.tag', 'vessels.0.vessel_class', 'vessels.0.voyage_id', 'vessels.1.corporate_entities.charterer.id', 'vessels.1.corporate_entities.charterer.label', 'vessels.1.corporate_entities.charterer.layer', 'vessels.1.corporate_entities.charterer.probability', 
'vessels.1.corporate_entities.charterer.source', 'vessels.1.corporate_entities.effective_controller.id', 'vessels.1.corporate_entities.effective_controller.label', 'vessels.1.corporate_entities.effective_controller.layer', 'vessels.1.corporate_entities.effective_controller.probability', 'vessels.1.corporate_entities.effective_controller.source', 'vessels.1.corporate_entities.time_charterer.end_timestamp', 'vessels.1.corporate_entities.time_charterer.id', 'vessels.1.corporate_entities.time_charterer.label', 'vessels.1.corporate_entities.time_charterer.layer', 'vessels.1.corporate_entities.time_charterer.probability', 'vessels.1.corporate_entities.time_charterer.source', 'vessels.1.corporate_entities.time_charterer.start_timestamp', 'vessels.1.cubic_capacity', 'vessels.1.dwt', 'vessels.1.end_timestamp', 'vessels.1.fixture_fulfilled', 'vessels.1.id', 'vessels.1.imo', 'vessels.1.mmsi', 'vessels.1.name', 'vessels.1.start_timestamp', 'vessels.1.status', 'vessels.1.tags.0.end_timestamp', 'vessels.1.tags.0.start_timestamp', 'vessels.1.tags.0.tag', 'vessels.1.vessel_class', 'vessels.1.voyage_id', 'vessels.2.corporate_entities.charterer.id', 'vessels.2.corporate_entities.charterer.label', 'vessels.2.corporate_entities.charterer.layer', 'vessels.2.corporate_entities.charterer.probability', 'vessels.2.corporate_entities.charterer.source', 'vessels.2.corporate_entities.effective_controller.id', 'vessels.2.corporate_entities.effective_controller.label', 'vessels.2.corporate_entities.effective_controller.layer', 'vessels.2.corporate_entities.effective_controller.probability', 'vessels.2.corporate_entities.effective_controller.source', 'vessels.2.corporate_entities.time_charterer.end_timestamp', 'vessels.2.corporate_entities.time_charterer.id', 'vessels.2.corporate_entities.time_charterer.label', 'vessels.2.corporate_entities.time_charterer.layer', 'vessels.2.corporate_entities.time_charterer.probability', 'vessels.2.corporate_entities.time_charterer.source', 'vessels.2.corporate_entities.time_charterer.start_timestamp', 'vessels.2.cubic_capacity', 'vessels.2.dwt', 'vessels.2.end_timestamp', 'vessels.2.id', 'vessels.2.imo', 'vessels.2.mmsi', 'vessels.2.name', 'vessels.2.start_timestamp', 'vessels.2.status', 'vessels.2.tags.0.start_timestamp', 'vessels.2.tags.0.tag', 'vessels.2.vessel_class', 'vessels.2.voyage_id', 'vessels.3.corporate_entities.effective_controller.id', 'vessels.3.corporate_entities.effective_controller.label', 'vessels.3.corporate_entities.effective_controller.layer', 'vessels.3.corporate_entities.effective_controller.probability', 'vessels.3.corporate_entities.effective_controller.source', 'vessels.3.cubic_capacity', 'vessels.3.dwt', 'vessels.3.id', 'vessels.3.imo', 'vessels.3.mmsi', 'vessels.3.name', 'vessels.3.start_timestamp', 'vessels.3.status', 'vessels.3.vessel_class', 'vessels.3.voyage_id', 'parent_ids.0.id', 'parent_ids.0.splinter_timestamp', 'parent_ids.1.id', 'parent_ids.1.splinter_timestamp', ]","title":"Cargo Movements"},{"location":"endpoints/cargo_movements/#notes","text":"A cargo movement is a complicated, nested structure. Between it's point of loading and discharge, a cargo movement may be carried by N or more vessels, with N-1 associated STS events. Each of these N vessels could have an associated effective controller, charterer, time charterer... etc. In order to represent a cargo movement as a flat (not nested) record in a dataframe, the sdk flattens the cargo movement, generating many columns in the process. The columns are logically named. 
Let's say that a cargo is transferred between 4 vessels en route from a load in Rotterdam to a discharge in New York. This is represented as 1 cargo_port_unload_event , followed by 3 cargo_sts_event s, and finally 1 cargo_port_unload_event . In this example the name of the 1st vessel, is found in the vessels.0.name column (we're using zero-based numbering indexes). Likewise, the imo of the second vessel is found in the vessels.1.imo column. To find the name of the country in which the second STS event occured, we'd use the events.cargo_sts_event.1.location.country.layer column. Similarly, to find out when the first vessel started loading the cargo from Rotterdam, we'd use the events.cargo_port_load_event.0.start_timestamp column. By default, the columns returned are something along the lines of. DEFAULT_COLUMNS = [ 'events.cargo_port_load_event.0.location.port.label', 'events.cargo_port_unload_event.0.location.port.label', 'product.group.label', 'product.grade.label', 'quantity', 'vessels.0.name', 'events.cargo_port_load_event.0.end_timestamp', 'events.cargo_port_unload_event.0.start_timestamp', ] The exact default columns used can be found at cargo_movements.DEFAULT_COLUMNS A near complete list of columns is given below [ 'cargo_movement_id', 'events.cargo_fso_load_event.0.end_timestamp', 'events.cargo_fso_load_event.0.event_type', 'events.cargo_fso_load_event.0.fso_vessel_id', 'events.cargo_fso_load_event.0.fso_vessel_name', 'events.cargo_fso_load_event.0.location.country.id', 'events.cargo_fso_load_event.0.location.country.label', 'events.cargo_fso_load_event.0.location.country.layer', 'events.cargo_fso_load_event.0.location.country.probability', 'events.cargo_fso_load_event.0.location.country.source', 'events.cargo_fso_load_event.0.location.region.id', 'events.cargo_fso_load_event.0.location.region.label', 'events.cargo_fso_load_event.0.location.region.layer', 'events.cargo_fso_load_event.0.location.region.probability', 'events.cargo_fso_load_event.0.location.region.source', 'events.cargo_fso_load_event.0.location.shipping_region.id', 'events.cargo_fso_load_event.0.location.shipping_region.label', 'events.cargo_fso_load_event.0.location.shipping_region.layer', 'events.cargo_fso_load_event.0.location.shipping_region.probability', 'events.cargo_fso_load_event.0.location.shipping_region.source', 'events.cargo_fso_load_event.0.location.sts_zone.id', 'events.cargo_fso_load_event.0.location.sts_zone.label', 'events.cargo_fso_load_event.0.location.sts_zone.layer', 'events.cargo_fso_load_event.0.location.sts_zone.probability', 'events.cargo_fso_load_event.0.location.sts_zone.source', 'events.cargo_fso_load_event.0.location.trading_block.id', 'events.cargo_fso_load_event.0.location.trading_block.label', 'events.cargo_fso_load_event.0.location.trading_block.layer', 'events.cargo_fso_load_event.0.location.trading_block.probability', 'events.cargo_fso_load_event.0.location.trading_block.source', 'events.cargo_fso_load_event.0.location.trading_region.id', 'events.cargo_fso_load_event.0.location.trading_region.label', 'events.cargo_fso_load_event.0.location.trading_region.layer', 'events.cargo_fso_load_event.0.location.trading_region.probability', 'events.cargo_fso_load_event.0.location.trading_region.source', 'events.cargo_fso_load_event.0.location.trading_subregion.id', 'events.cargo_fso_load_event.0.location.trading_subregion.label', 'events.cargo_fso_load_event.0.location.trading_subregion.layer', 'events.cargo_fso_load_event.0.location.trading_subregion.probability', 
'events.cargo_fso_load_event.0.location.trading_subregion.source', 'events.cargo_fso_load_event.0.pos.0', 'events.cargo_fso_load_event.0.pos.1', 'events.cargo_fso_load_event.0.probability', 'events.cargo_fso_load_event.0.start_timestamp', 'events.cargo_fso_load_event.0.to_vessel_id', 'events.cargo_fso_load_event.0.to_vessel_name', 'events.cargo_fso_unload_event.0.end_timestamp', 'events.cargo_fso_unload_event.0.event_type', 'events.cargo_fso_unload_event.0.from_vessel_id', 'events.cargo_fso_unload_event.0.from_vessel_name', 'events.cargo_fso_unload_event.0.fso_vessel_id', 'events.cargo_fso_unload_event.0.fso_vessel_name', 'events.cargo_fso_unload_event.0.location.country.id', 'events.cargo_fso_unload_event.0.location.country.label', 'events.cargo_fso_unload_event.0.location.country.layer', 'events.cargo_fso_unload_event.0.location.country.probability', 'events.cargo_fso_unload_event.0.location.country.source', 'events.cargo_fso_unload_event.0.location.region.id', 'events.cargo_fso_unload_event.0.location.region.label', 'events.cargo_fso_unload_event.0.location.region.layer', 'events.cargo_fso_unload_event.0.location.region.probability', 'events.cargo_fso_unload_event.0.location.region.source', 'events.cargo_fso_unload_event.0.location.shipping_region.id', 'events.cargo_fso_unload_event.0.location.shipping_region.label', 'events.cargo_fso_unload_event.0.location.shipping_region.layer', 'events.cargo_fso_unload_event.0.location.shipping_region.probability', 'events.cargo_fso_unload_event.0.location.shipping_region.source', 'events.cargo_fso_unload_event.0.location.sts_zone.id', 'events.cargo_fso_unload_event.0.location.sts_zone.label', 'events.cargo_fso_unload_event.0.location.sts_zone.layer', 'events.cargo_fso_unload_event.0.location.sts_zone.probability', 'events.cargo_fso_unload_event.0.location.sts_zone.source', 'events.cargo_fso_unload_event.0.location.trading_block.id', 'events.cargo_fso_unload_event.0.location.trading_block.label', 'events.cargo_fso_unload_event.0.location.trading_block.layer', 'events.cargo_fso_unload_event.0.location.trading_block.probability', 'events.cargo_fso_unload_event.0.location.trading_block.source', 'events.cargo_fso_unload_event.0.location.trading_region.id', 'events.cargo_fso_unload_event.0.location.trading_region.label', 'events.cargo_fso_unload_event.0.location.trading_region.layer', 'events.cargo_fso_unload_event.0.location.trading_region.probability', 'events.cargo_fso_unload_event.0.location.trading_region.source', 'events.cargo_fso_unload_event.0.location.trading_subregion.id', 'events.cargo_fso_unload_event.0.location.trading_subregion.label', 'events.cargo_fso_unload_event.0.location.trading_subregion.layer', 'events.cargo_fso_unload_event.0.location.trading_subregion.probability', 'events.cargo_fso_unload_event.0.location.trading_subregion.source', 'events.cargo_fso_unload_event.0.pos.0', 'events.cargo_fso_unload_event.0.pos.1', 'events.cargo_fso_unload_event.0.probability', 'events.cargo_fso_unload_event.0.start_timestamp', 'events.cargo_port_load_event.0.end_timestamp', 'events.cargo_port_load_event.0.event_type', 'events.cargo_port_load_event.0.location.country.id', 'events.cargo_port_load_event.0.location.country.label', 'events.cargo_port_load_event.0.location.country.layer', 'events.cargo_port_load_event.0.location.country.probability', 'events.cargo_port_load_event.0.location.country.source', 'events.cargo_port_load_event.0.location.port.id', 'events.cargo_port_load_event.0.location.port.label', 
'events.cargo_port_load_event.0.location.port.layer', 'events.cargo_port_load_event.0.location.port.probability', 'events.cargo_port_load_event.0.location.port.source', 'events.cargo_port_load_event.0.location.region.id', 'events.cargo_port_load_event.0.location.region.label', 'events.cargo_port_load_event.0.location.region.layer', 'events.cargo_port_load_event.0.location.region.probability', 'events.cargo_port_load_event.0.location.region.source', 'events.cargo_port_load_event.0.location.shipping_region.id', 'events.cargo_port_load_event.0.location.shipping_region.label', 'events.cargo_port_load_event.0.location.shipping_region.layer', 'events.cargo_port_load_event.0.location.shipping_region.probability', 'events.cargo_port_load_event.0.location.shipping_region.source', 'events.cargo_port_load_event.0.location.terminal.id', 'events.cargo_port_load_event.0.location.terminal.label', 'events.cargo_port_load_event.0.location.terminal.layer', 'events.cargo_port_load_event.0.location.terminal.probability', 'events.cargo_port_load_event.0.location.terminal.source', 'events.cargo_port_load_event.0.location.trading_block.id', 'events.cargo_port_load_event.0.location.trading_block.label', 'events.cargo_port_load_event.0.location.trading_block.layer', 'events.cargo_port_load_event.0.location.trading_block.probability', 'events.cargo_port_load_event.0.location.trading_block.source', 'events.cargo_port_load_event.0.location.trading_region.id', 'events.cargo_port_load_event.0.location.trading_region.label', 'events.cargo_port_load_event.0.location.trading_region.layer', 'events.cargo_port_load_event.0.location.trading_region.probability', 'events.cargo_port_load_event.0.location.trading_region.source', 'events.cargo_port_load_event.0.location.trading_subregion.id', 'events.cargo_port_load_event.0.location.trading_subregion.label', 'events.cargo_port_load_event.0.location.trading_subregion.layer', 'events.cargo_port_load_event.0.location.trading_subregion.probability', 'events.cargo_port_load_event.0.location.trading_subregion.source', 'events.cargo_port_load_event.0.pos.0', 'events.cargo_port_load_event.0.pos.1', 'events.cargo_port_load_event.0.probability', 'events.cargo_port_load_event.0.start_timestamp', 'events.cargo_port_unload_event.0.end_timestamp', 'events.cargo_port_unload_event.0.event_type', 'events.cargo_port_unload_event.0.location.country.id', 'events.cargo_port_unload_event.0.location.country.label', 'events.cargo_port_unload_event.0.location.country.layer', 'events.cargo_port_unload_event.0.location.country.probability', 'events.cargo_port_unload_event.0.location.country.source', 'events.cargo_port_unload_event.0.location.port.id', 'events.cargo_port_unload_event.0.location.port.label', 'events.cargo_port_unload_event.0.location.port.layer', 'events.cargo_port_unload_event.0.location.port.probability', 'events.cargo_port_unload_event.0.location.port.source', 'events.cargo_port_unload_event.0.location.region.id', 'events.cargo_port_unload_event.0.location.region.label', 'events.cargo_port_unload_event.0.location.region.layer', 'events.cargo_port_unload_event.0.location.region.probability', 'events.cargo_port_unload_event.0.location.region.source', 'events.cargo_port_unload_event.0.location.shipping_region.id', 'events.cargo_port_unload_event.0.location.shipping_region.label', 'events.cargo_port_unload_event.0.location.shipping_region.layer', 'events.cargo_port_unload_event.0.location.shipping_region.probability', 'events.cargo_port_unload_event.0.location.shipping_region.source', 
'events.cargo_port_unload_event.0.location.sts_zone.id', 'events.cargo_port_unload_event.0.location.sts_zone.label', 'events.cargo_port_unload_event.0.location.sts_zone.layer', 'events.cargo_port_unload_event.0.location.sts_zone.probability', 'events.cargo_port_unload_event.0.location.sts_zone.source', 'events.cargo_port_unload_event.0.location.terminal.id', 'events.cargo_port_unload_event.0.location.terminal.label', 'events.cargo_port_unload_event.0.location.terminal.layer', 'events.cargo_port_unload_event.0.location.terminal.probability', 'events.cargo_port_unload_event.0.location.terminal.source', 'events.cargo_port_unload_event.0.location.trading_block.id', 'events.cargo_port_unload_event.0.location.trading_block.label', 'events.cargo_port_unload_event.0.location.trading_block.layer', 'events.cargo_port_unload_event.0.location.trading_block.probability', 'events.cargo_port_unload_event.0.location.trading_block.source', 'events.cargo_port_unload_event.0.location.trading_region.id', 'events.cargo_port_unload_event.0.location.trading_region.label', 'events.cargo_port_unload_event.0.location.trading_region.layer', 'events.cargo_port_unload_event.0.location.trading_region.probability', 'events.cargo_port_unload_event.0.location.trading_region.source', 'events.cargo_port_unload_event.0.location.trading_subregion.id', 'events.cargo_port_unload_event.0.location.trading_subregion.label', 'events.cargo_port_unload_event.0.location.trading_subregion.layer', 'events.cargo_port_unload_event.0.location.trading_subregion.probability', 'events.cargo_port_unload_event.0.location.trading_subregion.source', 'events.cargo_port_unload_event.0.pos.0', 'events.cargo_port_unload_event.0.pos.1', 'events.cargo_port_unload_event.0.probability', 'events.cargo_port_unload_event.0.start_timestamp', 'events.cargo_storage_event.0.end_timestamp', 'events.cargo_storage_event.0.event_type', 'events.cargo_storage_event.0.location.country.id', 'events.cargo_storage_event.0.location.country.label', 'events.cargo_storage_event.0.location.country.layer', 'events.cargo_storage_event.0.location.country.probability', 'events.cargo_storage_event.0.location.country.source', 'events.cargo_storage_event.0.location.region.id', 'events.cargo_storage_event.0.location.region.label', 'events.cargo_storage_event.0.location.region.layer', 'events.cargo_storage_event.0.location.region.probability', 'events.cargo_storage_event.0.location.region.source', 'events.cargo_storage_event.0.location.shipping_region.id', 'events.cargo_storage_event.0.location.shipping_region.label', 'events.cargo_storage_event.0.location.shipping_region.layer', 'events.cargo_storage_event.0.location.shipping_region.probability', 'events.cargo_storage_event.0.location.shipping_region.source', 'events.cargo_storage_event.0.location.trading_block.id', 'events.cargo_storage_event.0.location.trading_block.label', 'events.cargo_storage_event.0.location.trading_block.layer', 'events.cargo_storage_event.0.location.trading_block.probability', 'events.cargo_storage_event.0.location.trading_block.source', 'events.cargo_storage_event.0.location.trading_region.id', 'events.cargo_storage_event.0.location.trading_region.label', 'events.cargo_storage_event.0.location.trading_region.layer', 'events.cargo_storage_event.0.location.trading_region.probability', 'events.cargo_storage_event.0.location.trading_region.source', 'events.cargo_storage_event.0.location.trading_subregion.id', 'events.cargo_storage_event.0.location.trading_subregion.label', 
'events.cargo_storage_event.0.location.trading_subregion.layer', 'events.cargo_storage_event.0.location.trading_subregion.probability', 'events.cargo_storage_event.0.location.trading_subregion.source', 'events.cargo_storage_event.0.pos.0', 'events.cargo_storage_event.0.pos.1', 'events.cargo_storage_event.0.start_timestamp', 'events.cargo_storage_event.0.vessel_id', 'events.cargo_sts_event.0.end_timestamp', 'events.cargo_sts_event.0.event_type', 'events.cargo_sts_event.0.from_vessel_id', 'events.cargo_sts_event.0.from_vessel_name', 'events.cargo_sts_event.0.location.country.id', 'events.cargo_sts_event.0.location.country.label', 'events.cargo_sts_event.0.location.country.layer', 'events.cargo_sts_event.0.location.country.probability', 'events.cargo_sts_event.0.location.country.source', 'events.cargo_sts_event.0.location.port.id', 'events.cargo_sts_event.0.location.port.label', 'events.cargo_sts_event.0.location.port.layer', 'events.cargo_sts_event.0.location.port.probability', 'events.cargo_sts_event.0.location.port.source', 'events.cargo_sts_event.0.location.region.id', 'events.cargo_sts_event.0.location.region.label', 'events.cargo_sts_event.0.location.region.layer', 'events.cargo_sts_event.0.location.region.probability', 'events.cargo_sts_event.0.location.region.source', 'events.cargo_sts_event.0.location.shipping_region.id', 'events.cargo_sts_event.0.location.shipping_region.label', 'events.cargo_sts_event.0.location.shipping_region.layer', 'events.cargo_sts_event.0.location.shipping_region.probability', 'events.cargo_sts_event.0.location.shipping_region.source', 'events.cargo_sts_event.0.location.sts_zone.id', 'events.cargo_sts_event.0.location.sts_zone.label', 'events.cargo_sts_event.0.location.sts_zone.layer', 'events.cargo_sts_event.0.location.sts_zone.probability', 'events.cargo_sts_event.0.location.sts_zone.source', 'events.cargo_sts_event.0.location.trading_block.id', 'events.cargo_sts_event.0.location.trading_block.label', 'events.cargo_sts_event.0.location.trading_block.layer', 'events.cargo_sts_event.0.location.trading_block.probability', 'events.cargo_sts_event.0.location.trading_block.source', 'events.cargo_sts_event.0.location.trading_region.id', 'events.cargo_sts_event.0.location.trading_region.label', 'events.cargo_sts_event.0.location.trading_region.layer', 'events.cargo_sts_event.0.location.trading_region.probability', 'events.cargo_sts_event.0.location.trading_region.source', 'events.cargo_sts_event.0.location.trading_subregion.id', 'events.cargo_sts_event.0.location.trading_subregion.label', 'events.cargo_sts_event.0.location.trading_subregion.layer', 'events.cargo_sts_event.0.location.trading_subregion.probability', 'events.cargo_sts_event.0.location.trading_subregion.source', 'events.cargo_sts_event.0.pos.0', 'events.cargo_sts_event.0.pos.1', 'events.cargo_sts_event.0.start_timestamp', 'events.cargo_sts_event.0.to_vessel_id', 'events.cargo_sts_event.0.to_vessel_name', 'events.cargo_sts_event.1.end_timestamp', 'events.cargo_sts_event.1.event_type', 'events.cargo_sts_event.1.from_vessel_id', 'events.cargo_sts_event.1.from_vessel_name', 'events.cargo_sts_event.1.location.country.id', 'events.cargo_sts_event.1.location.country.label', 'events.cargo_sts_event.1.location.country.layer', 'events.cargo_sts_event.1.location.country.probability', 'events.cargo_sts_event.1.location.country.source', 'events.cargo_sts_event.1.location.region.id', 'events.cargo_sts_event.1.location.region.label', 'events.cargo_sts_event.1.location.region.layer', 
'events.cargo_sts_event.1.location.region.probability', 'events.cargo_sts_event.1.location.region.source', 'events.cargo_sts_event.1.location.shipping_region.id', 'events.cargo_sts_event.1.location.shipping_region.label', 'events.cargo_sts_event.1.location.shipping_region.layer', 'events.cargo_sts_event.1.location.shipping_region.probability', 'events.cargo_sts_event.1.location.shipping_region.source', 'events.cargo_sts_event.1.location.sts_zone.id', 'events.cargo_sts_event.1.location.sts_zone.label', 'events.cargo_sts_event.1.location.sts_zone.layer', 'events.cargo_sts_event.1.location.sts_zone.probability', 'events.cargo_sts_event.1.location.sts_zone.source', 'events.cargo_sts_event.1.location.trading_block.id', 'events.cargo_sts_event.1.location.trading_block.label', 'events.cargo_sts_event.1.location.trading_block.layer', 'events.cargo_sts_event.1.location.trading_block.probability', 'events.cargo_sts_event.1.location.trading_block.source', 'events.cargo_sts_event.1.location.trading_region.id', 'events.cargo_sts_event.1.location.trading_region.label', 'events.cargo_sts_event.1.location.trading_region.layer', 'events.cargo_sts_event.1.location.trading_region.probability', 'events.cargo_sts_event.1.location.trading_region.source', 'events.cargo_sts_event.1.location.trading_subregion.id', 'events.cargo_sts_event.1.location.trading_subregion.label', 'events.cargo_sts_event.1.location.trading_subregion.layer', 'events.cargo_sts_event.1.location.trading_subregion.probability', 'events.cargo_sts_event.1.location.trading_subregion.source', 'events.cargo_sts_event.1.pos.0', 'events.cargo_sts_event.1.pos.1', 'events.cargo_sts_event.1.start_timestamp', 'events.cargo_sts_event.1.to_vessel_id', 'events.cargo_sts_event.1.to_vessel_name', 'product.category.id', 'product.category.label', 'product.category.layer', 'product.category.probability', 'product.category.source', 'product.grade.id', 'product.grade.label', 'product.grade.layer', 'product.grade.probability', 'product.grade.source', 'product.group.id', 'product.group.label', 'product.group.layer', 'product.group.probability', 'product.group.source', 'product.group_product.id', 'product.group_product.label', 'product.group_product.layer', 'product.group_product.probability', 'product.group_product.source', 'quantity', 'status', 'vessels.0.corporate_entities.charterer.id', 'vessels.0.corporate_entities.charterer.label', 'vessels.0.corporate_entities.charterer.layer', 'vessels.0.corporate_entities.charterer.probability', 'vessels.0.corporate_entities.charterer.source', 'vessels.0.corporate_entities.effective_controller.id', 'vessels.0.corporate_entities.effective_controller.label', 'vessels.0.corporate_entities.effective_controller.layer', 'vessels.0.corporate_entities.effective_controller.probability', 'vessels.0.corporate_entities.effective_controller.source', 'vessels.0.corporate_entities.time_charterer.end_timestamp', 'vessels.0.corporate_entities.time_charterer.id', 'vessels.0.corporate_entities.time_charterer.label', 'vessels.0.corporate_entities.time_charterer.layer', 'vessels.0.corporate_entities.time_charterer.probability', 'vessels.0.corporate_entities.time_charterer.source', 'vessels.0.corporate_entities.time_charterer.start_timestamp', 'vessels.0.cubic_capacity', 'vessels.0.dwt', 'vessels.0.end_timestamp', 'vessels.0.fixture_fulfilled', 'vessels.0.fixture_id', 'vessels.0.id', 'vessels.0.imo', 'vessels.0.mmsi', 'vessels.0.name', 'vessels.0.start_timestamp', 'vessels.0.status', 'vessels.0.tags.0.end_timestamp', 
'vessels.0.tags.0.start_timestamp', 'vessels.0.tags.0.tag', 'vessels.0.vessel_class', 'vessels.0.voyage_id', 'vessels.1.corporate_entities.charterer.id', 'vessels.1.corporate_entities.charterer.label', 'vessels.1.corporate_entities.charterer.layer', 'vessels.1.corporate_entities.charterer.probability', 'vessels.1.corporate_entities.charterer.source', 'vessels.1.corporate_entities.effective_controller.id', 'vessels.1.corporate_entities.effective_controller.label', 'vessels.1.corporate_entities.effective_controller.layer', 'vessels.1.corporate_entities.effective_controller.probability', 'vessels.1.corporate_entities.effective_controller.source', 'vessels.1.corporate_entities.time_charterer.end_timestamp', 'vessels.1.corporate_entities.time_charterer.id', 'vessels.1.corporate_entities.time_charterer.label', 'vessels.1.corporate_entities.time_charterer.layer', 'vessels.1.corporate_entities.time_charterer.probability', 'vessels.1.corporate_entities.time_charterer.source', 'vessels.1.corporate_entities.time_charterer.start_timestamp', 'vessels.1.cubic_capacity', 'vessels.1.dwt', 'vessels.1.end_timestamp', 'vessels.1.fixture_fulfilled', 'vessels.1.id', 'vessels.1.imo', 'vessels.1.mmsi', 'vessels.1.name', 'vessels.1.start_timestamp', 'vessels.1.status', 'vessels.1.tags.0.end_timestamp', 'vessels.1.tags.0.start_timestamp', 'vessels.1.tags.0.tag', 'vessels.1.vessel_class', 'vessels.1.voyage_id', 'vessels.2.corporate_entities.charterer.id', 'vessels.2.corporate_entities.charterer.label', 'vessels.2.corporate_entities.charterer.layer', 'vessels.2.corporate_entities.charterer.probability', 'vessels.2.corporate_entities.charterer.source', 'vessels.2.corporate_entities.effective_controller.id', 'vessels.2.corporate_entities.effective_controller.label', 'vessels.2.corporate_entities.effective_controller.layer', 'vessels.2.corporate_entities.effective_controller.probability', 'vessels.2.corporate_entities.effective_controller.source', 'vessels.2.corporate_entities.time_charterer.end_timestamp', 'vessels.2.corporate_entities.time_charterer.id', 'vessels.2.corporate_entities.time_charterer.label', 'vessels.2.corporate_entities.time_charterer.layer', 'vessels.2.corporate_entities.time_charterer.probability', 'vessels.2.corporate_entities.time_charterer.source', 'vessels.2.corporate_entities.time_charterer.start_timestamp', 'vessels.2.cubic_capacity', 'vessels.2.dwt', 'vessels.2.end_timestamp', 'vessels.2.id', 'vessels.2.imo', 'vessels.2.mmsi', 'vessels.2.name', 'vessels.2.start_timestamp', 'vessels.2.status', 'vessels.2.tags.0.start_timestamp', 'vessels.2.tags.0.tag', 'vessels.2.vessel_class', 'vessels.2.voyage_id', 'vessels.3.corporate_entities.effective_controller.id', 'vessels.3.corporate_entities.effective_controller.label', 'vessels.3.corporate_entities.effective_controller.layer', 'vessels.3.corporate_entities.effective_controller.probability', 'vessels.3.corporate_entities.effective_controller.source', 'vessels.3.cubic_capacity', 'vessels.3.dwt', 'vessels.3.id', 'vessels.3.imo', 'vessels.3.mmsi', 'vessels.3.name', 'vessels.3.start_timestamp', 'vessels.3.status', 'vessels.3.vessel_class', 'vessels.3.voyage_id', 'parent_ids.0.id', 'parent_ids.0.splinter_timestamp', 'parent_ids.1.id', 'parent_ids.1.splinter_timestamp', ]","title":"Notes"},{"location":"endpoints/cargo_timeseries/","text":"vortexasdk.endpoints.cargo_timeseries Try me out in your browser: CargoTimeSeries CargoTimeSeries(self) search CargoTimeSeries.search(self, filter_activity: str, timeseries_activity: str = None, timeseries_frequency: str 
= 'day', timeseries_unit: str = 'b', filter_time_min: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2019, 10, 1, 1, 0), filter_charterers: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_products: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_storage_locations: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, disable_geographic_exclusion_rules: bool = None, timeseries_activity_time_span_min: int = None, timeseries_activity_time_span_max: int = None) -> vortexasdk.endpoints.timeseries_result.TimeSeriesResult Find aggregate flows between regions, for various products, for various vessels, or various corporations. Example questions that can be answered with this endpoint: How many Crude/Condensate barrels have been imported into China each day over the last year? How many tonnes of Fuel Oil has company X exported from the United States each week over the last 2 years? How have long-term Medium-Sour floating storage levels changed over time? Arguments filter_activity : Cargo movement activity on which to base the time filter. The endpoint only includes cargo movements that match this filter in the aggregations. Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'storing_state', 'storing_start', 'storing_end', 'transiting_state', 'oil_on_water_state']. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. filter_charterers : A charterer ID, or list of charterer IDs to filter on. filter_owners : An owner ID, or list of owner IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_storage_locations : A geography ID, or list of geography IDs to filter on. filter_ship_to_ship_locations : A geography ID, or list of geography IDs to filter on. filter_waypoints : A geography ID, or list of geography IDs to filter on. disable_geographic_exclusion_rules : This controls a popular industry term \"intra-movements\" and determines the filter behaviour for cargo leaving then entering the same geographic area. timeseries_activity : The cargo movement activity we want to aggregate on. This param defaults to filter_activity if left blank.
For example, Let's say we want to aggregate the unloading timestamps of all cargo movements that loaded in 2019, then we'd use filter_time_min and filter_time_max to specify 1st Jan 2019 and 31st Dec 2019 respectively, we'd set filter_activity='loading_state' and timeseries_activity='unloading_state' to filter on loadings but aggregate on unloadings. filter_activity Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'storing_state', 'storing_start', 'storing_end', 'transiting_state']. timeseries_frequency : Frequency denoting the granularity of the time series. Must be one of ['day', 'week', 'doe_week', 'month', 'quarter', 'year'] timeseries_unit : A numeric metric to be calculated for each time bucket. Must be one of ['b', 'bpd', 't', 'tpd', 'c', 'cpd'], corresponding to barrels, barrels per day, metric tonnes, metric tonnes per day, cargo movement count, cargo movement count per day, respectively. timeseries_activity_time_span_min : The minimum amount of time in milliseconds accounted for in a time series activity. Can be used to request long-term floating storage. For example, to only return floating storage movements that occurred for more than 14 days enter timeseries_activity_time_span_min=1000 * 60 * 60 * 24 * 14 in conjunction with filter_activity='storing_state' . timeseries_activity_time_span_max : The maximum amount of time in milliseconds accounted for in a time series activity. Can be used to request short-term floating storage. For example, to only return floating storage movements that occurred for less than 14 days enter timeseries_activity_time_span_max=1000 * 60 * 60 * 24 * 14 in conjunction with filter_activity='storing_state' . Returns TimeSeriesResult Example What was the monthly average barrels per day of crude loaded from Rotterdam over the last year? >>> from vortexasdk import CargoTimeSeries, Geographies, Products >>> rotterdam = [g.id for g in Geographies().search(\"rotterdam\").to_list() if \"port\" in g.layer] >>> crude = [p.id for p in Products().search(\"crude\").to_list() if \"Crude\" == p.name] >>> search_result = CargoTimeSeries().search( ... timeseries_unit='bpd', ... timeseries_frequency='month', ... filter_origins=rotterdam, ... filter_products=crude, ... filter_activity='loading_state', ... filter_time_min=datetime(2018, 1, 1), ... filter_time_max=datetime(2018, 12, 31)) >>> df = search_result.to_df() Gives the following: key count value 0 2018-01-01T00:00:00.000Z 0.354839 458665 1 2018-02-01T00:00:00.000Z 0.75 45024 2 2018-03-01T00:00:00.000Z 0.0645161 35663.5 3 2018-04-01T00:00:00.000Z 0.878777 12345.2 4 2018-05-01T00:00:00.000Z 0.455932 9999.32 5 2018-06-01T00:00:00.000Z 0.777667 12234.8 6 2018-07-01T00:00:00.000Z 0.555097 987666 7 2018-08-01T00:00:00.000Z 0.290323 5318008.1 8 2018-09-01T00:00:00.000Z 0.0333333 686888.87 9 2018-10-01T00:00:00.000Z 0.354839 234344 10 2018-11-01T00:00:00.000Z 0.2345 111111 11 2018-12-01T00:00:00.000Z 0.123129 34344.5","title":"Cargo Time Series"},{"location":"endpoints/corporations/","text":"vortexasdk.endpoints.corporations Try me out in your browser: Corporations Corporations(self) Corporations Endpoint. load_all Corporations.load_all(self) -> vortexasdk.endpoints.corporations_result.CorporationsResult Load all corporations. 
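For instance, a minimal sketch (the variable name is illustrative) combining load_all above with the to_df defaults documented further down this page:
>>> from vortexasdk import Corporations
>>> # Load every corporation into a DataFrame with the default columns ['id', 'name', 'corporate_entity_type']
>>> all_corporations = Corporations().load_all().to_df()
>>> all_corporations.head()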
search Corporations.search(self, term: Union[str, List[str]] = None, exact_term_match: bool = False) -> vortexasdk.endpoints.corporations_result.CorporationsResult Find all Corporations matching given search terms. Arguments term : The corporation name(s) we're filtering on exact_term_match : Search on only exact term matches, or allow similar matches. e.g. When searching for \"COS\" with exact_term_match=False , then the SDK will yield corporations named ['COSCO', 'COSMO OIL'] etc. When exact_term_match=True , the SDK won't yield any results, because no corporations have the exact name \"COS\". Returns List of corporation matching term Examples Let's load all corporations >>> from vortexasdk import Corporations >>> df = Corporations().search().to_df() returns id name corporate_entity_type 0 04f418ee78c1e17744ad653e7815e8e28891ed9ba25a8427030e4478e5c00974 3J ['effective_controller'] 1 b6384cf17f1639a64bbff04cfd32257bf732a3a13e4b0532802a9ae84a36be34 5XJAPANESE ['effective_controller'] Let's find all corporations with 'do' in the name. >>> [x.name for x in Corporations().search(term=\"do\").to_list()] [...] Further Documentation VortexaAPI Corporation Reference reference Corporations.reference(self, id: str) -> Dict Perform a corporation lookup. Arguments id : Corporation ID to lookup Returns Corporation record matching the ID Further Documentation: VortexaAPI Corporation Reference Examples Corporations().reference(id='12345abcdef') # doctest: +SKIP vortexasdk.endpoints.corporations_result CorporationsResult CorporationsResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the Vessels endpoint. to_list CorporationsResult.to_list(self) -> List[vortexasdk.api.corporation.Corporation] Represent vessels as a list. to_df CorporationsResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent corporations as a pd.DataFrame . Arguments columns : The corporation features we want in the dataframe. Enter columns='all' to include all features. Defaults to columns = ['id', 'name', 'corporate_entity_type'] . 
Returns pd.DataFrame of corporations.","title":"Corporations"},{"location":"endpoints/destination_breakdown/","text":"vortexasdk.endpoints.destination_breakdown Try me out in your browser: DestinationBreakdown DestinationBreakdown(self) search DestinationBreakdown.search(self, breakdown_geography: str = 'country', breakdown_unit_average_basis: str = None, filter_activity: str = 'any_activity', breakdown_unit: str = 'b', disable_geographic_exclusion_rules: bool = None, breakdown_size: int = None, filter_time_min: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2019, 10, 1, 1, 0), filter_products: Union[str, List[str]] = None, filter_charterers: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_vessel_flags: Union[str, List[str]] = None, filter_vessel_ice_class: Union[str, List[str]] = None, filter_vessel_propulsion: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_storage_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_waypoints: Union[str, List[str]] = None, exclude_storage_locations: Union[str, List[str]] = None, exclude_ship_to_ship_locations: Union[str, List[str]] = None, exclude_vessel_ice_class: Union[str, List[str]] = None, exclude_vessel_propulsion: Union[str, List[str]] = None, exclude_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None) -> vortexasdk.endpoints.reference_breakdown_result.ReferenceBreakdownResult Destination locations breakdown aggregation by geographic area Arguments breakdown_unit_average_basis : Per day metrics only - movement activity on which to base the average metric. Can be one of state properties of a cargo movement: identified_for_loading_state , loading_state , transiting_state , storing_state , ship_to_ship , unloading_state , unloaded_state , oil_on_water_state , unknown_state , or one of time properties of a cargo movement: identified_for_loading_at , loading_start , loading_end , storing_start , storing_end , ship_to_ship_start , ship_to_ship_end , unloading_start , unloading_end . breakdown_unit : Units to aggregate upon. Must be one of the following: 'b' , 't' , 'cbm' , 'bpd' , 'tpd' , 'mpd' . breakdown_geography : Geography hierarchy of the origin to aggregate upon. Must be one of the following: 'terminal' , 'port' , 'country' , 'shipping_region' , 'region' , 'trading_block' , 'trading_region' , 'trading_subregion' , 'sts_zone' , 'waypoint' . breakdown_size : Number of top geographies to return. 
Default is 5. disable_geographic_exclusion_rules : A boolean which specifies whether certain movements should be excluded, based on a combination of their origin and destination. filter_activity : Cargo movement activity on which to base the time filter. The endpoint only includes cargo movements matching that match this filter in the aggregations. Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'storing_state', 'storing_start', 'storing_end', 'transiting_state']. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_vessel_flags : A vessel flag ID, or list of vessel flag IDs to filter on. filter_vessel_ice_class : An ice class ID, or list of ice class IDs to filter on. filter_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to filter on. filter_charterers : An commercial entity ID, or list of commercial entity IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_storage_locations : A geography ID, or list of geography IDs to filter on. filter_waypoints : A geography ID, or list of geography IDs to filter on. filter_ship_to_ship_locations : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. exclude_products : A product ID, or list of product IDs to exclude. exclude_vessel_flags : A vessel flag ID, or list of vessel flag IDs to exclude. exclude_vessel_ice_class : An ice class ID, or list of ice class IDs to exclude. exclude_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to exclude. exclude_destinations : A location ID, or list of location IDs to exclude. exclude_origins : A location ID, or list of location IDs to exclude. exclude_storage_locations : A location ID, or list of location IDs to exclude. exclude_waypoints : A location ID, or list of location IDs to exclude. exclude_ship_to_ship_locations : A location ID, or list of location IDs to exclude. exclude_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to exclude. Returns ReferenceBreakdownResult Example _Breakdown by destination terminal of cargoes departing from the port of origin over the last 5 days, in tonnes. >>> from vortexasdk import DestinationBreakdown, Geographies >>> start = datetime(2019, 11, 10) >>> end = datetime(2019, 11, 15) >>> df = DestinationBreakdown().search( ... 
filter_activity=\"loading_end\", ... breakdown_geography=\"terminal\", ... breakdown_unit=\"t\", ... breakdown_size=5, ... filter_time_min=start, ... filter_time_max=end ... ).to_df() Gives the following: key label value count 0 606e73162cfd0492919ef96b04dae1bfddda09d148d03bafc1dc3eab979a9b0a SPSE - DPF - G.I.E. Petroleum Terminal 785819 12 1 844756c877c680ce0ff582a46b5bb1cf34cc33179df977a609b2c10838d9db5d SK Energy (Ulsan) 288529 11 2 a5269f5a20759b3a120af66a298fa2385a2b81d8b248aec590db73ecd984f8b7 Dongying CNOOC Oil & Petrochemicals Shandong 201283 11 3 78fcabe3bb6a47f2aa019ae9948be43c5ebbe08a2d1cba7b113315c85362cb7c Kandla Oil Terminal 121762 15 4 15db6ca55a3b13d3c4b135afcaf87f5d605680ac75177412af05be37fc3fec38 Pirpau Island 62933 12","title":"Destination Breakdown"},{"location":"endpoints/eia_forecasts/","text":"vortexasdk.endpoints.eia_forecasts Try me out in your browser: EIAForecasts EIAForecasts(self) EIA forecasts Endpoint, use this to search through Vortexa's EIA Forecasts data. The data includes: date : date of the forecast forecast_fri : Vortexa's data science based forecast of the EIA number to be published on the week value : Actual EIA import/export numbers as published by the EIA Weekly Supply Estimates report stocks : EIA stocks (kbl) cover : Cover (days of Supply for the whole of the US, as published by the EIA Weekly Supply Estimates report) runs : refinery runs (refiner \u201cPercent Operable Utilization\u201d as published by the EIA Weekly Supply Estimates report) search EIAForecasts.search(self, preset: str = 'padd1-gasoline-imports', filter_time_min: datetime.datetime = datetime.datetime(2020, 1, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2020, 1, 31, 0, 0)) -> vortexasdk.endpoints.eia_forecasts_result.EIAForecastResult Find EIA forecasts for a given preset and date range. Arguments preset : Use to specify what geography and product information you would like to query. Preset can be : 'padd1-gasoline-imports', 'padd3-gasoline-imports', 'padd5-gasoline-imports', 'us-gasoline-exports', 'padd1-crude-imports', 'padd3-crude-imports', 'padd5-crude-imports', 'us-crude-exports', 'padd1-diesel-imports', 'padd3-diesel-imports', 'padd5-diesel-imports', 'us-diesel-exports', 'padd1-jet-imports', 'padd5-jet-imports', 'us-jet-exports', 'padd1-fueloil-imports', 'padd3-fueloil-imports', 'padd5-fueloil-imports' or 'us-fueloil-exports' filter_time_min : The UTC start date of the time filter filter_time_max : The UTC end date of the time filter Returns List of EIA Forecast object matching selected 'preset'. Examples Find PADD5 gasoline imports EIA forecasts from January 2019. >>> from datetime import datetime >>> from vortexasdk import EIAForecasts >>> df = EIAForecasts().search( ... preset=\"padd5-gasoline-imports\", ... filter_time_min=datetime(2020, 1, 1), ... filter_time_max=datetime(2020, 1, 31) ... ).to_df() returns date forecast_fri value stocks cover runs 2020-01-31T00:00:00.000Z 454.96048964485 323 9541 26.5 65.9 2020-01-24T00:00:00.000Z 545.453497230504 579 10461 25.9 61.5 2020-01-17T00:00:00.000Z 510.289752707662 549 10325 25.2 64.7 2020-01-10T00:00:00.000Z 469.841470826967 2020-01-03T00:00:00.000Z 640.443229654771 Some values can be NULL: value, stocks, cover, runs. 
It can happen when: it's a very recent forecast, the Vortexa's data science based forecast (forecast_fri) is available but the complete EIA data isn't yet it's an older forecast and the data is not available vortexasdk.endpoints.eia_forecasts_result EIAForecastResult EIAForecastResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the EIAForecasts endpoint. to_list EIAForecastResult.to_list(self) -> List[vortexasdk.api.eia_forecast.EIAForecast] Represent EIAForecast data as a list. to_df EIAForecastResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent EIA forecasts as a pd.DataFrame . Arguments columns : The EIA forecasts columns we want in the dataframe. Enter columns='all' to include all columns. Defaults to columns = ['date', 'forecast_fri', 'value', 'stocks', 'cover', 'runs'] . Returns pd.DataFrame of EIA forecasts.","title":"EIA Forecasts"},{"location":"endpoints/fixtures/","text":"vortexasdk.endpoints.fixtures Try me out in your browser: Fixtures Fixtures(self) Fixtures Endpoint, use this to search through Vortexa's Fixtures data. A detailed explanation of the fixtures can be found here . Limitation Fixtures are available through the UI, API & SDK only by permission from our shipbroker partner only. If we limit API access to certain products then you can only pull the fixtures data for said product e.g. CPP only, then they only have access to CPP fixtures. For an API key to have access to the endpoint, it needs the scopes \"v.r.fix\" and \"v.r.ais\". What conditions produce a Fixture 'fulfilled' status? - Internal In terms of the data, we use the laycan and the mapped fixture 'origin'. Historical movements: The start timestamp of the loading event must be within the 5-day laycan window (even if the laycan window is less than 5 days, we expand it to 5), or the laycan must be within the start and end timestamp of the loading event and the fixture's origin hierarchy must agree with the actual loading polygon's hierarchy. Future movements: The vessel can be in the reported fixture origin within the laycan window give or take 3 days. We also compare the predicted destination's hierarchy with the fixture's origin hierarchy. An agreement (given that the previous feasibility condition is met) is a sufficient condition to create a movement. When there is disagreement or we don't have a predicted destination, we take into account other factors (e.g. if the destination is a waypoint, we treat them as agreeing). Fixture status Fixture status indicates the point that the deal has reached in its evolution from \"Subs\" for vessels on subjects, to \"Fxd\" for fixed vessels or sometimes \"Failed\" or \"FLD\" for failed fixtures or sometimes \"RPLC\" for a replacement fixture or \"Conf\" for confirmed and \"Corr\" for corrected. What does the model do in the case of exact duplicates? For historical movements, we don't have a particular logic. For future movements, we apply our own internal sorting procedure. When we have 2 fixtures that are near exact duplicates but with different freight rates or different charterers, how does the model pick? For historical movements, randomly. For future movements, we apply our own internal sorting procedure. 
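Before the full argument list below, a minimal hedged sketch of how fulfilment can be inspected in practice. It assumes your API key carries the fixture scope described above and that the 'vtx_fulfilled' column returned by to_df holds booleans; the date range is illustrative:
>>> from datetime import datetime
>>> from vortexasdk import Fixtures
>>> df = Fixtures().search(
...     filter_time_field='fixing_timestamp',
...     filter_time_min=datetime(2020, 1, 1),
...     filter_time_max=datetime(2020, 1, 31),
... ).to_df(columns=['id', 'fixing_timestamp', 'vtx_fulfilled'])
>>> # Share of fixtures in the window that Vortexa marked as fulfilled
>>> df['vtx_fulfilled'].mean()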
search Fixtures.search(self, filter_time_field: str = 'fixing_timestamp', filter_time_min: datetime.datetime = datetime.datetime(2020, 1, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2020, 1, 2, 0, 0), ids: Union[str, List[str]] = None, filter_charterers: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_products: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_flags: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, order: str = None, order_direction: str = None, size: int = None) -> vortexasdk.endpoints.fixtures_result.FixtureResult Find Fixtures matching the given filters and date range. Arguments filter_time_field : The field that the time range should be filtered against. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. ids : Filter specific fixtures. filter_charterers : A charterer ID, or list of charterer IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_vessel_flags : A vessel flag, or list of vessel flags to filter on. exclude_origins : A geography ID, or list of geography IDs to exclude. exclude_destinations : A geography ID, or list of geography IDs to exclude. exclude_products : A product ID, or list of product IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_charterers : A charterer ID, or list of charterer IDs to exclude. exclude_vessel_flags : A vessel flag, or list of vessel flags to exclude. order : Used to sort the returned results. order_direction : Determines the direction of sorting. Returns List of Fixtures objects. Examples Find Fixtures fixed between 1st and 2nd January 2020. >>> from datetime import datetime >>> from vortexasdk import Fixtures >>> df = Fixtures().search( ... filter_time_field=\"fixing_timestamp\", ... filter_time_min=datetime(2020, 1, 1), ... filter_time_max=datetime(2020, 1, 2), ... ).to_df() returns vessel.name tones origin.label product.label ALPINE EAGLE 454.96048964485 UK Crude 
Note: the fixture scope is needed to access this endpoint. vortexasdk.endpoints.fixtures_result FixtureResult FixtureResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the Fixtures endpoint. to_list FixtureResult.to_list(self) -> List[vortexasdk.api.fixture.Fixture] Represent Fixtures data as a list. to_df FixtureResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent Fixtures as a pd.DataFrame . Arguments columns : The Fixtures columns we want in the dataframe. Defaults to columns = [ \"id\", 'vessels.corporate_entities.charterer.id', 'vessels.corporate_entities.charterer.label', 'vessels.corporate_entities.charterer.layer', 'vessels.corporate_entities.charterer.probability', 'vessels.corporate_entities.charterer.source', 'vessels.corporate_entities.effective_controller.id', 'vessels.corporate_entities.effective_controller.label', 'vessels.corporate_entities.effective_controller.layer', 'vessels.corporate_entities.effective_controller.probability', 'vessels.corporate_entities.effective_controller.source', 'vessels.corporate_entities.time_charterer.end_timestamp', 'vessels.corporate_entities.time_charterer.id', 'vessels.corporate_entities.time_charterer.label', 'vessels.corporate_entities.time_charterer.layer', 'vessels.corporate_entities.time_charterer.probability', 'vessels.corporate_entities.time_charterer.source', 'vessels.corporate_entities.time_charterer.start_timestamp', 'vessels.cubic_capacity', 'vessels.dwt', 'vessels.end_timestamp', 'vessels.fixture_fulfilled', 'vessels.fixture_id', 'vessels.id', 'vessels.imo', 'vessels.mmsi', 'vessels.name', 'vessels.start_timestamp', 'vessels.status', 'vessels.tags.end_timestamp', 'vessels.tags.start_timestamp', 'vessels.tags.tag', 'vessels.vessel_class', 'vessels.voyage_id', \"laycan_from\", \"laycan_to\", \"tones\", \"fixing_timestamp\", \"fulfilled\", \"vtx_fulfilled\", \"destination.label\", \"destination.id\", \"origin.label\", \"origin.id\", \"product.label\", \"product.id\", \"charterer.label\", \"charterer.id\", ] . A near complete list of columns is given below [ \"id\", \"vessel.id\", \"vessel.name\", \"laycan_from\", \"laycan_to\", \"tones\", \"fixing_timestamp\", \"fulfilled\", \"vtx_fulfilled\", \"destination.label\", \"origin.label\", \"product.label\", \"charterer.label\", ] Returns pd.DataFrame of Fixtures.\",\"title\":\"Fixtures\"},{\"location\":\"endpoints/fixtures/#what-does-the-model-do-in-the-case-of-exact-duplicates\",\"text\":\"For historical movements, we don't have a particular logic. For future movements, we apply our own internal sorting procedure.\",\"title\":\"What does the model do in the case of exact duplicates?\"},{\"location\":\"endpoints/fixtures/#when-we-have-2-fixtures-that-are-near-exact-duplicates-but-with-different-freight-rates-or-different-charterers-how-does-the-model-pick\",\"text\":\"For historical movements, randomly. For future movements, we apply our own internal sorting procedure.\",\"title\":\"When we have 2 fixtures that are near exact duplicates but with different freight rates or different charterers, how does the model pick?\"},{\"location\":\"endpoints/freight_pricing_search/\",\"text\":\"vortexasdk.endpoints.freight_pricing_search Try me out in your browser: FreightPricingSearch FreightPricingSearch(self) Freight Pricing Endpoint, use this to search through Vortexa's Baltic Exchange pricing data. 
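A minimal sketch before the full search documentation below (the route codes, day and columns mirror the example further down; requesting two routes in one call is assumed to be supported since routes accepts a list):
>>> from datetime import datetime
>>> from vortexasdk import FreightPricingSearch
>>> # Rates for clean route TC5 and dirty route TD3C on 15th Nov 2021
>>> df = FreightPricingSearch().search(
...     routes=['TC5', 'TD3C'],
...     days=[datetime(2021, 11, 15)],
... ).to_df(columns=['short_code', 'rate', 'rate_unit'])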
search FreightPricingSearch.search(self, routes: Union[List[str], str] = None, days: List[datetime.datetime] = [], offset: int = None, order: str = None, order_direction: str = None) -> vortexasdk.endpoints.freight_pricing_result.FreightPricingResult List of pricing information applicable for a specified route on a given day. Arguments routes : Used to filter by specific routes. Must be one of the following: Clean routes - TC1 , TC2_37 , TC5 , TC6 , TC7 , TC8 , TC9 , TC10 , TC11 , TC12 , TC14 , TC15 , TC16 , TC17 , TC18 , TC19 . Dirty routes - TD1 , TD2 , TD3C , TD6 , TD7 , TD8 , TD9 , TD12 , TD14 , TD15 , TD17 , TD18 , TD19 , TD20 , TD21 , TD22 , TD23 , TD24 , TD25 , TD26 . BLPG routes - BLPG1 , BLPG2 , BLPG3 . days : Used to filter results by day on which the record was generated. Must be an ISO date array or not supplied. order : Used to sort the returned results. Must be either 'record_date' or not supplied. order_direction : Determines the direction of sorting. \u2018asc\u2019 for ascending, \u2018desc\u2019 for descending. offset : Used to page results. The offset from which records should be returned. size : Used to page results. The size of the result set. Between 0 and 500. Returns FreightPricingResult Example WS rate for the TD3C route generated on 15th Nov 2021. >>> from vortexasdk import FreightPricingSearch >>> from datetime import datetime >>> day = [datetime(2021, 11, 15)] >>> df = FreightPricingSearch().search( ... routes=['TD3C'], ... days=day ... ).to_df(columns=['short_code','rate','rate_unit']) short_code rate rate_unit 0 TD3C 43.32 WS","title":"Search"},{"location":"endpoints/freight_pricing_timeseries/","text":"vortexasdk.endpoints.freight_pricing_timeseries Try me out in your browser: FreightPricingTimeseries FreightPricingTimeseries(self) search FreightPricingTimeseries.search(self, time_min: datetime.datetime = datetime.datetime(2021, 9, 1, 0, 0), time_max: datetime.datetime = datetime.datetime(2021, 11, 1, 0, 0), routes: Union[List[str], str] = None, breakdown_frequency: str = None, breakdown_property: str = None) -> vortexasdk.endpoints.timeseries_result.TimeSeriesResult Time series of the selected pricing information for given routes in the specified time range. Arguments time_min : The UTC start date of the time filter. time_max : The UTC end date of the time filter. breakdown_frequency : Must be one of: 'day' , 'week' , 'doe_week' , 'month' , 'quarter' or 'year' . breakdown_property : Property used to build the value of the aggregation. Must be one of the following: route , cost , tce . routes : Used to filter by specific routes. Must be one of the following: Clean routes - TC1 , TC2_37 , TC5 , TC6 , TC7 , TC8 , TC9 , TC10 , TC11 , TC12 , TC14 , TC15 , TC16 , TC17 , TC18 , TC19 . Dirty routes - TD1 , TD2 , TD3C , TD6 , TD7 , TD8 , TD9 , TD12 , TD14 , TD15 , TD17 , TD18 , TD19 , TD20 , TD21 , TD22 , TD23 , TD24 , TD25 , TD26 . BLPG routes - BLPG1 , BLPG2 , BLPG3 . Returns TimeSeriesResult Example Time series for the WS rate of the TD3C route between 1st and 15th November 2021. >>> from vortexasdk import FreightPricingTimeseries >>> from datetime import datetime >>> start = datetime(2021, 11, 1) >>> end = datetime(2021, 11, 15) >>> df = (FreightPricingTimeseries().search( ... time_min=start, ... time_max=end, ... routes=['TD3C'], ... breakdown_property='rate', ... breakdown_frequency='day') ... 
.to_df()).head(2) Gives the following: key value count 0 2021-11-01 00:00:00+00:00 46.04999923706055 1 1 2021-11-02 00:00:00+00:00 45.13999938964844 1","title":"Time Series"},{"location":"endpoints/geographies/","text":"vortexasdk.endpoints.geographies Try me out in your browser: Geographies Geographies(self) Geographies endpoint. load_all Geographies.load_all(self) -> vortexasdk.endpoints.geographies_result.GeographyResult Load all geographies. search Geographies.search(self, term: Union[str, List[str]] = None, exact_term_match: bool = False, filter_layer: str = None) -> vortexasdk.endpoints.geographies_result.GeographyResult Find all geographies matching given search terms. Arguments term : The geography name (or names) we're filtering on exact_term_match : Search on only exact term matches, or allow similar matches. e.g. When searching for \"China\" with exact_term_match=False , then the SDK will yield geographies named ['China', 'South China', 'China Energy Services Ningbo'...] etc. When exact_term_match=True , the SDK will only yield the geography named China . filter_layer : Must be one of geographical type ['terminal', 'port', 'country', 'shipping_region', 'region', 'trading_block', 'trading_region', 'trading_subregion', 'sts_zone', 'waypoint', 'storage', 'root']. Returns List of geographies matching term Examples Find all geographies with portsmouth in the name. >>> from vortexasdk import Geographies >>> [x.name for x in Geographies().search(term=\"portsmouth\").to_list()] ['Portsmouth [GB]', 'Portsmouth, NH [US]'] Search multiple geography terms >>> df = Geographies().search(term=[\"Liverpool\", \"Southampton\"]).to_df() returns id name layer 0 b63d8f625669fd... Liverpool [GB] ['port'] 1 0cb7d4566de0f2... Southampton [GB] ['port'] 2 8b4273e3181f2d... Liverpool Docks ['terminal'] 3 98c50b0d2ee2b1... Liverpool Bulk Liquids ['terminal'] reference Geographies.reference(self, id: str) -> Dict Perform a geography lookup. Arguments id : Geography ID to lookup Returns Geography matching the ID Further Documentation: VortexaAPI Geography Reference vortexasdk.endpoints.geographies_result GeographyResult GeographyResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the Geography endpoint. to_list GeographyResult.to_list(self) -> List[vortexasdk.api.geography.Geography] Represent geographies as a list. to_df GeographyResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent geographies as a pd.DataFrame . Arguments columns : The geography features we want in the dataframe. Enter columns='all' to include all features. Defaults to columns = ['id', 'name', 'layer'] . 
Returns pd.DataFrame of geographies.","title":"Geographies"},{"location":"endpoints/movement_status_breakdown/","text":"vortexasdk.endpoints.movement_status_breakdown Try me out in your browser: MovementStatusBreakdown MovementStatusBreakdown(self) search MovementStatusBreakdown.search(self, timestamp: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), breakdown_unit_average_basis: str = None, filter_activity: str = 'any_activity', breakdown_unit: str = 'b', disable_geographic_exclusion_rules: bool = None, breakdown_size: int = None, filter_time_min: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2019, 10, 1, 1, 0), filter_products: Union[str, List[str]] = None, filter_charterers: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_vessel_flags: Union[str, List[str]] = None, filter_vessel_ice_class: Union[str, List[str]] = None, filter_vessel_propulsion: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_storage_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_waypoints: Union[str, List[str]] = None, exclude_storage_locations: Union[str, List[str]] = None, exclude_ship_to_ship_locations: Union[str, List[str]] = None, exclude_vessel_ice_class: Union[str, List[str]] = None, exclude_vessel_propulsion: Union[str, List[str]] = None, exclude_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None) -> vortexasdk.endpoints.reference_breakdown_result.ReferenceBreakdownResult Origin locations breakdown aggregation by geographic area Arguments breakdown_unit_average_basis : Per day metrics only - movement activity on which to base the average metric. Can be one of state properties of a cargo movement: identified_for_loading_state , loading_state , transiting_state , storing_state , ship_to_ship , unloading_state , unloaded_state , oil_on_water_state , unknown_state , or one of time properties of a cargo movement: identified_for_loading_at , loading_start , loading_end , storing_start , storing_end , ship_to_ship_start , ship_to_ship_end , unloading_start , unloading_end . breakdown_unit : Units to aggregate upon. Must be one of the following: 'b' , 't' , 'cbm' , 'bpd' , 'tpd' , 'mpd' . timestamp : The UTC date of the specific movement status to search. breakdown_size : Number of top geographies to return. Default is 5. 
disable_geographic_exclusion_rules : A boolean which specifies whether certain movements should be excluded, based on a combination of their origin and destination. filter_activity : Cargo movement activity on which to base the time filter. The endpoint only includes cargo movements matching that match this filter in the aggregations. Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'storing_state', 'storing_start', 'storing_end', 'transiting_state']. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_vessel_flags : A vessel flag ID, or list of vessel flag IDs to filter on. filter_vessel_ice_class : An ice class ID, or list of ice class IDs to filter on. filter_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to filter on. filter_charterers : An commercial entity ID, or list of commercial entity IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_storage_locations : A geography ID, or list of geography IDs to filter on. filter_waypoints : A geography ID, or list of geography IDs to filter on. filter_ship_to_ship_locations : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. exclude_products : A product ID, or list of product IDs to exclude. exclude_vessel_flags : A vessel flag ID, or list of vessel flag IDs to exclude. exclude_vessel_ice_class : An ice class ID, or list of ice class IDs to exclude. exclude_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to exclude. exclude_destinations : A location ID, or list of location IDs to exclude. exclude_origins : A location ID, or list of location IDs to exclude. exclude_storage_locations : A location ID, or list of location IDs to exclude. exclude_waypoints : A location ID, or list of location IDs to exclude. exclude_ship_to_ship_locations : A location ID, or list of location IDs to exclude. exclude_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to exclude. Returns ReferenceBreakdownResult Example Breakdown by movement status of cargoes in tonnes. >>> from vortexasdk import MovementStatusBreakdown >>> start = datetime(2019, 11, 10) >>> timestamp = datetime(2019, 1, 19) >>> end = datetime(2033, 11, 15) >>> df = MovementStatusBreakdown().search( ... filter_activity=\"loading_end\", ... timestamp=timestamp, ... 
breakdown_unit=\"t\", ... breakdown_size=5, ... filter_time_min=start, ... filter_time_max=end ... ).to_df() Gives the following: key value count 0 transiting_state 3009799 24 1 identified_for_loading_state 776599 17 2 loading_state 381359 24 3 unloading_state 238723 21 4 storing_state 118285 18 4 ship_to_ship 118285 18","title":"Movement Status Breakdown"},{"location":"endpoints/onshore_inventories_search/","text":"vortexasdk.endpoints.onshore_inventories_search Try me out in your browser: OnshoreInventoriesSearch OnshoreInventoriesSearch(self) Crude Onshore Inventories Endpoint, use this to search through Vortexa's Onshore Inventory data. Please note: you will require a subscription to our Crude Onshore Inventories module to access this endpoint. search OnshoreInventoriesSearch.search(self, corporate_entity_ids: Union[str, List[str]] = None, crude_confidence: List[str] = None, location_ids: Union[str, List[str]] = None, storage_types: List[str] = None, time_min: datetime.datetime = datetime.datetime(2023, 8, 31, 10, 23, 46, 804267), time_max: datetime.datetime = datetime.datetime(2023, 9, 7, 10, 23, 46, 804281)) -> vortexasdk.endpoints.onshore_inventories_result.OnshoreInventoriesResult List of crude onshore inventories across the globe. Arguments corporate_entity_ids : An array of owner ID(s) to filter on. crude_confidence : An array of confidence metrics to filter on. Possible values are: 'confirmed\u2019 , \u2018probable\u2019 , \u2018unlikely\u2019 location_ids : An array of geography ID(s) to filter on. measurement_ids : An array of unique measurements (each COI measurement) to filter on. size : Used to page results. The size of the result set. Between 0 and 500. storage_types : An array of storage types to filter on. Possible values are: 'refinery' , 'non-refinery' , 'commercial' , 'spr' , 'tbd' ,. time_min : The UTC start date of the time filter. time_max : The UTC end date of the time filter. Returns OnshoreInventoriesResult Data frame example Top 5 Crude Onshore Inventories where 'crude_confidence' status is 'confirmed'. >>> from vortexasdk import OnshoreInventoriesSearch >>> df = OnshoreInventoriesSearch().search(crude_confidence=['confirmed']).to_df().head(5) measurement_id tank_id tank_details.capacity_bbl ... fill_bbl fill_tons fill_cbm 0 1e41bdfc8fa21a1f3d874d41a af83f5475ebd45b9167254667 225055 ... 194898 26648.208642 30986.443224 1 211d96e43ff6893d555f8e7e0 f7c583b26ff8d4e50d4ba9da5 658327 ... 131804 18021.429116 20955.254352 2 5ef5595cadf0161f6b59a0769 7047360864070b7a08802ae82 209196 ... 468790 64097.187910 74531.984520 3 b70f105d6309fb1acdb4b18c5 2ae82a3b79f32105716725460 664169 ... 105934 14484.249886 16842.234792 4 72841f54183a082de91d9er43 802ae82a3b79f321167254667 75669 ... 474814 64920.843406 75489.728232 List example First COI object in the list where 'crude_confidence' status is 'confirmed'. 
>>> from vortexasdk import OnshoreInventoriesSearch >>> list = OnshoreInventoriesSearch().search(crude_confidence='confirmed').to_list()[0] { measurement_id: '5731385e7b0ce8', tank_id: 'c5a6bf5e95e969cf7', tank_details: { id: 'c5a6bf5e95e969cf7', capacity_bbl: 875573, capacity_cbm: 139205, capacity_ton: 119716, corporate_entity_details: { id: 'b25523ae823b9e38bb11a161eb60d42194f1a886e58dfe39592dcc324f06f60e', label: 'Repsol' }, corporate_entity_id: 'b25523ae823b9e38bb11a161eb60d42194f1a886e58dfe39592dcc324f06f60e', crude_confidence: 'confirmed', last_updated: '2021-08-03T14: 34: 18.533Z', leaf: True, location_id: 'a98c21d06633d86c8c55', location_details: [ { id: 'a98c21d06633d86c8c55', label: 'CartagenaLNGTerminal(Enagas)', layer: 'terminal' }, { id: 'c7baa1cfb2a11e7c2eca', label: 'Cartagena[ ES ]', layer: 'port' } ], name: 'CGA030', pos: (-0.926539, 37.574), radius: 45, ref_type: 'asset_tank', storage_terminal_id: 'e757382d4aa5a8aa77d0f11ac7f535fb32993bae89bdf581771f155d1c0149b8', storage_terminal_name: 'RepsolCartagenaRefinery', storage_type: 'refinery' }, measurement_timestamp: '2021-09-06T17: 50: 12', publish_timestamp: '2021-09-08T13: 59: 45', report_timestamp: '2021-09-09T14: 00: 00', carry_forward: False, fill_bbl: 732345, fill_tons: 100132.79950499999, fill_cbm: 116434.06685999999, reference_data_version: '20210906-1631611377217' }","title":"Search"},{"location":"endpoints/onshore_inventories_timeseries/","text":"vortexasdk.endpoints.onshore_inventories_timeseries Try me out in your browser: OnshoreInventoriesTimeseries OnshoreInventoriesTimeseries(self) Please note: you will require a subscription to our Crude Onshore Inventories module to access this endpoint. search OnshoreInventoriesTimeseries.search(self, corporate_entity_ids: Union[str, List[str]] = None, crude_confidence: List[str] = None, location_ids: Union[str, List[str]] = None, storage_types: List[str] = None, asset_tank_ids: Union[str, List[str]] = None, time_max: datetime.datetime = None, time_min: datetime.datetime = None, timeseries_frequency: str = None, timeseries_split_property: str = None, timeseries_unit: str = None, timeseries_unit_operator: str = None, exclude_corporate_entity_ids: List[str] = None, exclude_crude_confidence: List[str] = None, exclude_location_ids: Union[str, List[str]] = None, exclude_storage_types: List[str] = None) -> vortexasdk.endpoints.breakdown_result.BreakdownResult Sum of crude onshore inventories storage and total capacity updated weekly. For frequencies other than 'week', the values returned are calculated by returning the final weekly onshore inventories 'quantity' bucket for the specified period. Arguments corporate_entity_ids : An array of owner ID(s) to filter on. crude_confidence : An array of confidence metrics to filter on. Possible values are: 'confirmed\u2019 , \u2018probable\u2019 , \u2018unlikely\u2019 location_ids : An array of geography ID(s) to filter on. storage_types : An array of storage types to filter on. Possible values are: 'refinery' , 'non-refinery' , 'commercial' , 'spr' , 'tbd' asset_tank_ids : An array of unique Asset Tanks ID(s) to filter on - linked to the Asset Tank Reference data. time_min : The UTC start date of the time filter. time_max : The UTC end date of the time filter. timeseries_frequency : Frequency denoting the granularity of the time series. Must be one of the following: 'week' , 'month' , 'year' . timeseries_split_property : Property used to breakdown the aggregation. 
By default 'quantity' is used which returns only the total, but aggregations can be broken down by either 'crude_confidence' , 'storage_type' , 'location_country' , 'location_port' , 'location_region' , 'location_shipping_region' , 'location_trading_region' , 'location_trading_subregion' timeseries_unit : A numeric metric to be calculated for each time bucket. Must be either 'b' , 't' , 'cbm' corresponding to barrels, metric tonnes, cubic meters. timeseries_unit_operator : Argument must be either 'fill' (total in storage) or 'capacity' (total capacity). exclude_corporate_entity_ids : An array of owner ID(s) to exclude from the results, exclude_crude_confidence : An array of confidence metrics to exclude from the results exclude_location_ids : An array of geography ID(s) to exclude from the results exclude_storage_types : An array of storage types to exclude from the results Returns BreakdownResult Example Total storage capacity across Europe for the first week of January 2021. >>> from vortexasdk import OnshoreInventoriesTimeseries >>> from datetime import datetime >>> search_result = OnshoreInventoriesTimeseries().search( ... location_ids=[\"f39d455f5d38907394d6da3a91da4e391f9a34bd6a17e826d6042761067e88f4\"], ... time_min=datetime(2021, 1, 5), ... time_max=datetime(2021, 1, 12), ... timeseries_frequency=\"week\", ... timeseries_split_property=\"location_country\", ... timeseries_unit=\"b\", ... timeseries_unit_operator=\"capacity\", ... ).to_list() Gives the following result: [ BreakdownItem(key='2021-09-09T14:00:00.000Z', count=3769, value=994621677, breakdown=[ { 'id': 'ee1de4914cc26e8f1326b49793b089131870d478714c07e0c99c56cb307704c5', 'label': 'Italy', 'value': 204482432, 'count': 762 }, { 'id': '2aaad41b89dfad19e5668918018ae02695d7710bcbe5f2dc689234e8da492de3', 'label': 'UnitedKingdom', 'value': 113001186, 'count': 415 }, { 'id': '284c8d9831e1ac59c0cb714468856d561af722c8a2432c13a001f909b97e6b71', 'label': 'Germany', 'value': 93583672, 'count': 405 }, { 'id': 'e9e556620469f46a4dc171aef71073f5286a288da35c5883cac760446b0ceb46', 'label': 'France', 'value': 86652291, 'count': 327 }, ... 
]) ]","title":"Time Series"},{"location":"endpoints/origin_breakdown/","text":"vortexasdk.endpoints.origin_breakdown Try me out in your browser: OriginBreakdown OriginBreakdown(self) search OriginBreakdown.search(self, breakdown_geography: str = 'country', breakdown_unit_average_basis: str = None, filter_activity: str = 'any_activity', breakdown_unit: str = 'b', disable_geographic_exclusion_rules: bool = None, breakdown_size: int = None, filter_time_min: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2019, 10, 1, 1, 0), filter_products: Union[str, List[str]] = None, filter_charterers: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_vessel_flags: Union[str, List[str]] = None, filter_vessel_ice_class: Union[str, List[str]] = None, filter_vessel_propulsion: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_storage_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_waypoints: Union[str, List[str]] = None, exclude_storage_locations: Union[str, List[str]] = None, exclude_ship_to_ship_locations: Union[str, List[str]] = None, exclude_vessel_ice_class: Union[str, List[str]] = None, exclude_vessel_propulsion: Union[str, List[str]] = None, exclude_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None) -> vortexasdk.endpoints.reference_breakdown_result.ReferenceBreakdownResult Origin locations breakdown aggregation by geographic area Arguments breakdown_unit_average_basis : Per day metrics only - movement activity on which to base the average metric. Can be one of state properties of a cargo movement: identified_for_loading_state , loading_state , transiting_state , storing_state , ship_to_ship , unloading_state , unloaded_state , oil_on_water_state , unknown_state , or one of time properties of a cargo movement: identified_for_loading_at , loading_start , loading_end , storing_start , storing_end , ship_to_ship_start , ship_to_ship_end , unloading_start , unloading_end . breakdown_unit : Units to aggregate upon. Must be one of the following: 'b' , 't' , 'cbm' , 'bpd' , 'tpd' , 'mpd' . breakdown_geography : Geography hierarchy of the origin to aggregate upon. Must be one of the following: 'terminal' , 'port' , 'country' , 'shipping_region' , 'region' , 'trading_block' , 'trading_region' , 'trading_subregion' , 'sts_zone' , 'waypoint' . breakdown_size : Number of top geographies to return. Default is 5. 
disable_geographic_exclusion_rules : A boolean which specifies whether certain movements should be excluded, based on a combination of their origin and destination. filter_activity : Cargo movement activity on which to base the time filter. The endpoint only includes cargo movements matching that match this filter in the aggregations. Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'storing_state', 'storing_start', 'storing_end', 'transiting_state']. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_vessel_flags : A vessel flag ID, or list of vessel flag IDs to filter on. filter_vessel_ice_class : An ice class ID, or list of ice class IDs to filter on. filter_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to filter on. filter_charterers : An commercial entity ID, or list of commercial entity IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_storage_locations : A geography ID, or list of geography IDs to filter on. filter_waypoints : A geography ID, or list of geography IDs to filter on. filter_ship_to_ship_locations : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. exclude_products : A product ID, or list of product IDs to exclude. exclude_vessel_flags : A vessel flag ID, or list of vessel flag IDs to exclude. exclude_vessel_ice_class : An ice class ID, or list of ice class IDs to exclude. exclude_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to exclude. exclude_destinations : A location ID, or list of location IDs to exclude. exclude_origins : A location ID, or list of location IDs to exclude. exclude_storage_locations : A location ID, or list of location IDs to exclude. exclude_waypoints : A location ID, or list of location IDs to exclude. exclude_ship_to_ship_locations : A location ID, or list of location IDs to exclude. exclude_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to exclude. Returns ReferenceBreakdownResult Example Breakdown by origin terminal of cargoes departing from the port of origin over the last 5 days, in tonnes. >>> from vortexasdk import OriginBreakdown, Geographies >>> start = datetime(2019, 11, 10) >>> end = datetime(2019, 11, 15) >>> df = OriginBreakdown().search( ... filter_activity=\"loading_end\", ... 
breakdown_geography=\"terminal\", ... breakdown_unit=\"t\", ... breakdown_size=5, ... filter_time_min=start, ... filter_time_max=end ... ).to_df() Gives the following: key label value count 0 c3daea3cc9c5b3bd91c90882d42c2a418c4cf17b90ff12da3ac78444282a238a Juaymah Crude Oil Terminal 3009799 24 1 3a39cf841ece0c7cb879f72af01cb634191142e0de8010d5ef877fd66c2e8605 Houston Enterprise Terminal 776599 17 2 345b7661310bc82a04e0a4edffd02c286c410c023b53edfb90ed3386640c0476 Arzew GL1Z/GL2Z LNG Terminal 381359 24 3 9dfa3be1b42d1f5e80361b6f442b5217b486876ad0c25e382055887c9e231ad2 SabTank (PCQ-1) Al Jubail 238723 21 4 4813dd7209e85b128cc2fbc7c08fef08d26259550210f28a5c7ff3ccd7b2ba61 Mailiao Industrial Park-Formosa Plastics 118285 18","title":"Origin Breakdown"},{"location":"endpoints/parent_product_breakdown/","text":"vortexasdk.endpoints.parent_product_breakdown Try me out in your browser: ParentProductBreakdown ParentProductBreakdown(self) search ParentProductBreakdown.search(self, breakdown_unit_average_basis: str = None, filter_activity: str = 'any_activity', breakdown_unit: str = 'b', disable_geographic_exclusion_rules: bool = None, breakdown_size: int = None, filter_time_min: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2019, 10, 1, 1, 0), filter_products: Union[str, List[str]] = None, filter_charterers: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_vessel_flags: Union[str, List[str]] = None, filter_vessel_ice_class: Union[str, List[str]] = None, filter_vessel_propulsion: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_storage_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_waypoints: Union[str, List[str]] = None, exclude_storage_locations: Union[str, List[str]] = None, exclude_ship_to_ship_locations: Union[str, List[str]] = None, exclude_vessel_ice_class: Union[str, List[str]] = None, exclude_vessel_propulsion: Union[str, List[str]] = None, exclude_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None) -> vortexasdk.endpoints.reference_breakdown_result.ReferenceBreakdownResult Origin locations breakdown aggregation by geographic area Arguments breakdown_unit_average_basis : Per day metrics only - movement activity on which to base the average metric. 
Can be one of state properties of a cargo movement: identified_for_loading_state , loading_state , transiting_state , storing_state , ship_to_ship , unloading_state , unloaded_state , oil_on_water_state , unknown_state , or one of time properties of a cargo movement: identified_for_loading_at , loading_start , loading_end , storing_start , storing_end , ship_to_ship_start , ship_to_ship_end , unloading_start , unloading_end . breakdown_unit : Units to aggregate upon. Must be one of the following: 'b' , 't' , 'cbm' , 'bpd' , 'tpd' , 'mpd' . breakdown_size : Number of top geographies to return. Default is 5. disable_geographic_exclusion_rules : A boolean which specifies whether certain movements should be excluded, based on a combination of their origin and destination. filter_activity : Cargo movement activity on which to base the time filter. The endpoint only includes cargo movements matching that match this filter in the aggregations. Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'storing_state', 'storing_start', 'storing_end', 'transiting_state']. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_vessel_flags : A vessel flag ID, or list of vessel flag IDs to filter on. filter_vessel_ice_class : An ice class ID, or list of ice class IDs to filter on. filter_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to filter on. filter_charterers : An commercial entity ID, or list of commercial entity IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_storage_locations : A geography ID, or list of geography IDs to filter on. filter_waypoints : A geography ID, or list of geography IDs to filter on. filter_ship_to_ship_locations : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. exclude_products : A product ID, or list of product IDs to exclude. exclude_vessel_flags : A vessel flag ID, or list of vessel flag IDs to exclude. exclude_vessel_ice_class : An ice class ID, or list of ice class IDs to exclude. exclude_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to exclude. exclude_destinations : A location ID, or list of location IDs to exclude. exclude_origins : A location ID, or list of location IDs to exclude. 
exclude_storage_locations : A location ID, or list of location IDs to exclude. exclude_waypoints : A location ID, or list of location IDs to exclude. exclude_ship_to_ship_locations : A location ID, or list of location IDs to exclude. exclude_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to exclude. Returns ReferenceBreakdownResult Example Breakdown by product of the cargoes in tonnes. >>> from vortexasdk import ParentProductBreakdown >>> start = datetime(2019, 11, 10) >>> end = datetime(2019, 11, 15) >>> df = ParentProductBreakdown().search( ... filter_activity=\"loading_end\", ... breakdown_unit=\"t\", ... breakdown_size=5, ... filter_time_min=start, ... filter_time_max=end ... ).to_df() Gives the following: key label value count 0 c3daea3cc9c5b3bd91c90882d42c2a418c4cf17b90ff12da3ac78444282a238a Juaymah Crude Oil Terminal 3009799 24 1 3a39cf841ece0c7cb879f72af01cb634191142e0de8010d5ef877fd66c2e8605 Houston Enterprise Terminal 776599 17 2 345b7661310bc82a04e0a4edffd02c286c410c023b53edfb90ed3386640c0476 Arzew GL1Z/GL2Z LNG Terminal 381359 24 3 9dfa3be1b42d1f5e80361b6f442b5217b486876ad0c25e382055887c9e231ad2 SabTank (PCQ-1) Al Jubail 238723 21 4 4813dd7209e85b128cc2fbc7c08fef08d26259550210f28a5c7ff3ccd7b2ba61 Mailiao Industrial Park-Formosa Plastics 118285 18","title":"Parent Product Breakdown"},{"location":"endpoints/product_breakdown/","text":"vortexasdk.endpoints.product_breakdown Try me out in your browser: ProductBreakdown ProductBreakdown(self) search ProductBreakdown.search(self, breakdown_product: str = 'group', breakdown_unit_average_basis: str = None, filter_activity: str = 'any_activity', breakdown_unit: str = 'b', disable_geographic_exclusion_rules: bool = None, breakdown_size: int = None, filter_time_min: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2019, 10, 1, 1, 0), filter_products: Union[str, List[str]] = None, filter_charterers: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_vessel_flags: Union[str, List[str]] = None, filter_vessel_ice_class: Union[str, List[str]] = None, filter_vessel_propulsion: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_storage_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_waypoints: Union[str, List[str]] = None, exclude_storage_locations: Union[str, List[str]] = None, exclude_ship_to_ship_locations: Union[str, List[str]] = None, exclude_vessel_ice_class: Union[str, List[str]] = None, 
exclude_vessel_propulsion: Union[str, List[str]] = None, exclude_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None) -> vortexasdk.endpoints.reference_breakdown_result.ReferenceBreakdownResult Origin locations breakdown aggregation by geographic area Arguments breakdown_unit_average_basis : Per day metrics only - movement activity on which to base the average metric. Can be one of state properties of a cargo movement: identified_for_loading_state , loading_state , transiting_state , storing_state , ship_to_ship , unloading_state , unloaded_state , oil_on_water_state , unknown_state , or one of time properties of a cargo movement: identified_for_loading_at , loading_start , loading_end , storing_start , storing_end , ship_to_ship_start , ship_to_ship_end , unloading_start , unloading_end . breakdown_unit : Units to aggregate upon. Must be one of the following: 'b' , 't' , 'cbm' , 'bpd' , 'tpd' , 'mpd' . breakdown_product : This represents the different levels of the product hierarchy. With 'group' being the top level, and grade being the lowest - or most accurate. 'group', 'group_product', 'category', 'grade' breakdown_size : Number of top geographies to return. Default is 5. disable_geographic_exclusion_rules : A boolean which specifies whether certain movements should be excluded, based on a combination of their origin and destination. filter_activity : Cargo movement activity on which to base the time filter. The endpoint only includes cargo movements matching that match this filter in the aggregations. Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'storing_state', 'storing_start', 'storing_end', 'transiting_state']. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_vessel_flags : A vessel flag ID, or list of vessel flag IDs to filter on. filter_vessel_ice_class : An ice class ID, or list of ice class IDs to filter on. filter_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to filter on. filter_charterers : An commercial entity ID, or list of commercial entity IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_storage_locations : A geography ID, or list of geography IDs to filter on. filter_waypoints : A geography ID, or list of geography IDs to filter on. filter_ship_to_ship_locations : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. exclude_products : A product ID, or list of product IDs to exclude. exclude_vessel_flags : A vessel flag ID, or list of vessel flag IDs to exclude. exclude_vessel_ice_class : An ice class ID, or list of ice class IDs to exclude. 
exclude_vessel_propulsion : A propulsion means ID, or list of propulsion means IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to exclude. exclude_destinations : A location ID, or list of location IDs to exclude. exclude_origins : A location ID, or list of location IDs to exclude. exclude_storage_locations : A location ID, or list of location IDs to exclude. exclude_waypoints : A location ID, or list of location IDs to exclude. exclude_ship_to_ship_locations : A location ID, or list of location IDs to exclude. exclude_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to exclude. Returns ReferenceBreakdownResult Example Breakdown by product of the cargoes in tonnes. >>> from vortexasdk import ProductBreakdown >>> from datetime import datetime >>> start = datetime(2019, 11, 10) >>> end = datetime(2019, 11, 15) >>> df = ProductBreakdown().search( ... filter_activity=\"loading_end\", ... breakdown_product=\"group\", ... breakdown_unit=\"t\", ... breakdown_size=5, ... filter_time_min=start, ... filter_time_max=end ... ).to_df() Gives the following: key label value count 0 c3daea3cc9c5b3bd91c90882d42c2a418c4cf17b90ff12da3ac78444282a238a Juaymah Crude Oil Terminal 3009799 24 1 3a39cf841ece0c7cb879f72af01cb634191142e0de8010d5ef877fd66c2e8605 Houston Enterprise Terminal 776599 17 2 345b7661310bc82a04e0a4edffd02c286c410c023b53edfb90ed3386640c0476 Arzew GL1Z/GL2Z LNG Terminal 381359 24 3 9dfa3be1b42d1f5e80361b6f442b5217b486876ad0c25e382055887c9e231ad2 SabTank (PCQ-1) Al Jubail 238723 21 4 4813dd7209e85b128cc2fbc7c08fef08d26259550210f28a5c7ff3ccd7b2ba61 Mailiao Industrial Park-Formosa Plastics 118285 18","title":"Product Breakdown"},{"location":"endpoints/products/","text":"vortexasdk.endpoints.products Try me out in your browser: Products Products(self) Products endpoint. load_all Products.load_all(self) -> vortexasdk.endpoints.products_result.ProductResult Load all products. search Products.search(self, term: Union[str, List[str]] = None, ids: Union[str, List[str]] = None, product_parent: Union[str, List[str]] = None, exact_term_match: bool = False, filter_layer: str = None) -> vortexasdk.endpoints.products_result.ProductResult Find all products matching given search terms. Arguments term : The name(s) (or partial name(s)) of a product we'd like to search ids : ID or IDs of products we'd like to search product_parent : ID, or list of IDs of the immediate product parent. E.g. product_parent='12345' will return all children of product 12345 . exact_term_match : By default, the SDK returns all products whose name includes the search term. For example, searching for \"Gasoil\" will return results including \"Gasoil\", \"Gasoil 0.4pc\", \"Gasoil 500ppm\" etc. Setting exact_term_match to True ensures that only exact term matches are returned, i.e. just \"Gasoil\" in this case. filter_layer : Must be one of the product types ['group', 'group_product', 'category', 'grade']. Returns List of products matching the search arguments. Examples Let's look for products with one of ['diesel', 'fuel oil', 'grane'] in their name, or related names. 
>>> from vortexasdk import Products >>> df = Products().search(term=['diesel', 'fuel oil', 'grane']).to_df('all') Returns id name layer.0 leaf parent.0.name parent.0.layer.0 parent.0.id meta.api_min meta.api_max ref_type meta.sulphur_min meta.sulphur_max 0 1c107b4317bc2c8... Fuel Oil category False Dirty products product 5de0b00094e0fd... 12.8878 12.8878 product nan nan 1 fddedd17e02507f... Grane grade True Medium-Sour subproduct_group a7e26956fbb917... 29.2955 29.2955 product 0.62 0.62 2 deda35eb9ca56b5... Diesel/Gasoil category False Clean products product b68cbb7746f8b9... 35.9556 35.9556 product nan nan Further Documentation VortexaAPI Product Reference reference Products.reference(self, id: str) -> Dict Perform a product lookup. Arguments id : Product ID to lookup Returns Product record matching the ID Further Documentation: VortexaAPI Product Reference vortexasdk.endpoints.products_result ProductResult ProductResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the Product endpoint. to_list ProductResult.to_list(self) -> List[vortexasdk.api.product.Product] Represent products as a list. to_df ProductResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent products as a pd.DataFrame . Arguments columns : The product features we want in the dataframe. Enter columns='all' to include all features. Defaults to columns = ['id', 'name', 'layer.0', 'parent.0.name'] . Returns pd.DataFrame of products.","title":"Products"},{"location":"endpoints/storage_terminals/","text":"vortexasdk.endpoints.storage_terminals Try me out in your browser: StorageTerminals StorageTerminals(self) Storage Terminals endpoint. A Storage Terminal is a reference value that corresponds to an ID associated with other entities. For example, a storage terminal object may have the following keys: { \"name\": \"Military Oil Depot\", \"parent\": { \"name\": \"Bandar Khomeini, Bandar Mahshahr [IR]\" } ... } These IDs represent storage terminals which can be found via the Storage Terminal reference endpoint. When the storage terminals endpoint is searched with those ids as parameters: >>> from vortexasdk import StorageTerminals >>> df = StorageTerminals().search(ids=[\"08bbaf7a67ab30036d73b9604b932352a73905e16b8342b27f02ae34941b7db5\"]).to_df() Returns id name lat lon 0 08bbaf7a67ab30036d73... Military Oil Depot 90 180 load_all StorageTerminals.load_all(self) -> vortexasdk.endpoints.storage_terminals_result.StorageTerminalResult Load all storage terminals. search StorageTerminals.search(self, ids: Union[str, List[str]] = None, term: Union[str, List[str]] = None) -> vortexasdk.endpoints.storage_terminals_result.StorageTerminalResult Find all storage terminals matching given term. Arguments ids : List of storage terminal ids to filter by. term : List of terms to filter on. Returns List of storage terminals matching the ids or term specified. Examples Find a storage terminal by term, for example the name of the storage terminal. >>> from vortexasdk import StorageTerminals >>> df = StorageTerminals().search(term=[\"Military\"]).to_df() Returns id name lat lon 0 08bbaf7a67ab30036d73... Military Oil Depot 90 180 vortexasdk.endpoints.storage_terminals_result StorageTerminalResult StorageTerminalResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the Storage Terminals endpoint. 
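For illustration, a minimal sketch (assuming a configured API key; the search term is only an example) of keeping hold of the result container and converting it both ways: >>> from vortexasdk import StorageTerminals >>> result = StorageTerminals().search(term=[\"Military\"]) >>> terminals = result.to_list() # list of StorageTerminal objects >>> df = result.to_df(columns=['id', 'name', 'lat', 'lon']) # DataFrame with selected columns The two conversion methods are documented below. 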
to_list StorageTerminalResult.to_list(self) -> List[vortexasdk.api.storage_terminal.StorageTerminal] Represent storage terminals as a list. to_df StorageTerminalResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent storage terminals as a pd.DataFrame . Arguments columns : The storage terminals features we want in the dataframe. Enter columns='all' to include all features. Defaults to columns = ['id', 'name', 'lat', 'lon'] . Returns pd.DataFrame of storage terminals.","title":"Storage Terminals"},{"location":"endpoints/vessel_availability_breakdown/","text":"vortexasdk.endpoints.vessel_availability_breakdown Try me out in your browser: VesselAvailabilityBreakdown VesselAvailabilityBreakdown(self) Please note: you will require a subscription to our Freight module to access this endpoint. search VesselAvailabilityBreakdown.search(self, filter_products: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_vessel_status: str = None, filter_vessel_location: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_destination: Union[str, List[str]] = None, filter_region: str = None, filter_port: str = None, use_reference_port: bool = False, filter_days_to_arrival: List[Dict[str, int]] = None, filter_vessel_dwt_min: int = None, filter_vessel_dwt_max: int = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_idle_min: int = None, filter_vessel_idle_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_recent_visits: str = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_vessel_status: str = None, exclude_vessel_location: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_destination: Union[str, List[str]] = None) -> vortexasdk.endpoints.timeseries_result.TimeSeriesResult Number and DWT of all vessels that can be available to load a given cargo at a given port, grouped by the number of days to arrival. Arguments filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_destination : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_status : The vessel status on which to base the filter. Enter 'vessel_status_ballast' for ballast vessels, 'vessel_status_laden_known' for laden vessels with known cargo (i.e. a type of cargo that Vortexa currently tracks) or 'any_activity' for any other vessels filter_vessel_location : A location ID, or list of location IDs to filter on. filter_port : Filter by port ID. filter_region : Filter by region ID - takes precedence over filter_port if provided. This should be used in conjunction with use_reference_port filter_days_to_arrival : Filter availability by time to arrival in days` use_reference_port : If this flag is enabled, we will return data for the reference port instead of the user selected one, filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). 
filter_vessel_idle_min : A number greater than 0 (representing idle days). filter_vessel_idle_max : A number greater than 0 and filter_vessel_idle_min (representing idle days). filter_vessel_dwt_min : A number between 0 and 550000. filter_vessel_dwt_max : A number between 0 and 550000. filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_recent_visits : Filter availability by each vessel's recent visits exclude_products : A product ID, or list of product IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_vessel_status : The vessel status on which to base the filter. Enter 'vessel_status_ballast' for ballast vessels, 'vessel_status_laden_known' for laden vessels with known cargo (i.e. a type of cargo that Vortexa currently tracks) or 'any_activity' for any other vessels exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to filter on. exclude_destination : A location ID, or list of location IDs to filter on. Returns TimeSeriesResult Example Breakdown of number and DWT of all vessels arriving at Rotterdam in the next 5 days. >>> from vortexasdk import VesselAvailabilityBreakdown, Geographies >>> rotterdam = [g.id for g in Geographies().search(\"rotterdam\").to_list() if \"port\" in g.layer] >>> df = VesselAvailabilityBreakdown().search( ... filter_port=rotterdam[0], ... filter_days_to_arrival={\"min\": 0, \"max\": 5} ... ).to_df() Gives the following: key value count 0 2021-06-23 00:00:00+00:00 2939754 34 1 2021-06-24 00:00:00+00:00 2676732 38 2 2021-06-25 00:00:00+00:00 6262914 74 3 2021-06-26 00:00:00+00:00 3445105 43 4 2021-06-27 00:00:00+00:00 3924460 51","title":"Breakdown"},{"location":"endpoints/vessel_availability_search/","text":"vortexasdk.endpoints.vessel_availability_search Try me out in your browser: VesselAvailabilitySearch VesselAvailabilitySearch(self) Vessel Availability Endpoint, use this to search through Vortexa's vessel availability data. Please note: you will require a subscription to our Freight module to access this endpoint. 
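Reference IDs are typically resolved before calling search. A minimal sketch (the port name is illustrative only) of looking up a port ID via the Geographies endpoint and passing it to the availability search, sorted by days to arrival: >>> from vortexasdk import Geographies, VesselAvailabilitySearch >>> ports = [g.id for g in Geographies().search(\"rotterdam\").to_list() if \"port\" in g.layer] >>> df = VesselAvailabilitySearch().search( ... filter_port=ports[0], ... filter_days_to_arrival={\"min\": 0, \"max\": 5}, ... order=\"days_to_arrival\", ... order_direction=\"asc\", ... ).to_df().head() The search method and its full set of filters are documented below. 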
search VesselAvailabilitySearch.search(self, filter_region: str = None, filter_port: str = None, use_reference_port: bool = None, filter_products: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_vessel_status: str = None, filter_vessel_location: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_destination: Union[str, List[str]] = None, filter_days_to_arrival: List[Dict[str, int]] = None, filter_vessel_dwt_min: int = None, filter_vessel_dwt_max: int = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_idle_min: int = None, filter_vessel_idle_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_recent_visits: str = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_vessel_status: str = None, exclude_vessel_location: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_destination: Union[str, List[str]] = None, offset: int = None, order: str = None, order_direction: str = None) -> vortexasdk.endpoints.vessel_availability_result.VesselAvailabilityResult List of vessels that can be available to load a given cargo at a given port on a future date. Arguments order : Used to sort the returned results. Must be one of the following: [\u2018vessel_status\u2019, \u2018days_to_arrival\u2019, \u2018days_idle\u2019]. order_direction : Determines the direction of sorting. \u2018asc\u2019 for ascending, \u2018desc\u2019 for descending. offset : Used to page results. The offset from which records should be returned. size : Used to page results. The size of the result set. Between 0 and 500. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_destination : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_status : The vessel status on which to base the filter. Enter 'vessel_status_ballast' for ballast vessels, 'vessel_status_laden_known' for laden vessels with known cargo (i.e. a type of cargo that Vortexa currently tracks) or 'any_activity' for any other vessels filter_vessel_location : A location ID, or list of location IDs to filter on. filter_port : Filter by port ID. filter_region : Filter by region ID - takes precedence over filter_port if provided. This should be used in conjunction with use_reference_port filter_days_to_arrival : Filter availability by time to arrival in days` use_reference_port : If this flag is enabled, we will return data for the reference port instead of the user selected one, filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_idle_min : A number greater than 0 (representing idle days). filter_vessel_idle_max : A number greater than 0 and filter_vessel_idle_min (representing idle days). filter_vessel_dwt_min : A number between 0 and 550000. filter_vessel_dwt_max : A number between 0 and 550000. 
filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_recent_visits : Filter availability by each vessel's recent visits exclude_products : A product ID, or list of product IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_vessel_status : The vessel status on which to base the filter. Enter 'vessel_status_ballast' for ballast vessels, 'vessel_status_laden_known' for laden vessels with known cargo (i.e. a type of cargo that Vortexa currently tracks) or 'any_activity' for any other vessels exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to filter on. exclude_destination : A location ID, or list of location IDs to filter on. Returns VesselAvailabilityResult Example Top 2 available vessels arriving at Rotterdam port in the next 5 days. >>> from vortexasdk import VesselAvailabilitySearch, Geographies >>> rotterdam = \"68faf65af1345067f11dc6723b8da32f00e304a6f33c000118fccd81947deb4e\" >>> df = VesselAvailabilitySearch().search( ... filter_port=rotterdam, ... filter_days_to_arrival={\"min\": 1, \"max\": 5} ... ).to_df(columns=['available_at','vessel_name','vessel_class']).head(2) available_at vessel_name vessel_class 0 2017-09-30 15:30:27+00:00 STAR RIVER handysize 1 2017-08-29 14:51:32+00:00 AMALTHEA aframax","title":"Search"},{"location":"endpoints/vessel_availability_timeseries/","text":"vortexasdk.endpoints.vessel_availability_timeseries Try me out in your browser: VesselAvailabilityTimeseries VesselAvailabilityTimeseries(self) Please note: you will require a subscription to our Freight module to access this endpoint. search VesselAvailabilityTimeseries.search(self, filter_time_min: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2019, 10, 1, 1, 0), filter_products: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_vessel_status: str = None, filter_vessel_location: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_destination: Union[str, List[str]] = None, filter_region: str = None, filter_port: str = None, use_reference_port: bool = False, filter_days_to_arrival: List[Dict[str, int]] = None, filter_vessel_dwt_min: int = None, filter_vessel_dwt_max: int = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_idle_min: int = None, filter_vessel_idle_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_recent_visits: str = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_vessel_status: str = None, exclude_vessel_location: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_destination: Union[str, List[str]] = None) -> vortexasdk.endpoints.timeseries_result.TimeSeriesResult Time series of the number of vessels that can be available to load a given cargo at a given port for every day in the specified range. Arguments filter_time_min : The UTC start date of the time filter. 
filter_time_max : The UTC end date of the time filter. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_destination : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_status : The vessel status on which to base the filter. Enter 'vessel_status_ballast' for ballast vessels, 'vessel_status_laden_known' for laden vessels with known cargo (i.e. a type of cargo that Vortexa currently tracks) or 'any_activity' for any other vessels filter_vessel_location : A location ID, or list of location IDs to filter on. filter_port : Filter by port ID. filter_region : Filter by region ID - takes precedence over filter_port if provided. This should be used in conjunction with use_reference_port filter_days_to_arrival : Filter availability by time to arrival in days` use_reference_port : If this flag is enabled, we will return data for the reference port instead of the user selected one, filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_idle_min : A number greater than 0 (representing idle days). filter_vessel_idle_max : A number greater than 0 and filter_vessel_idle_min (representing idle days). filter_vessel_dwt_min : A number between 0 and 550000. filter_vessel_dwt_max : A number between 0 and 550000. filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_recent_visits : Filter availability by each vessel's recent visits exclude_products : A product ID, or list of product IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_vessel_status : The vessel status on which to base the filter. Enter 'vessel_status_ballast' for ballast vessels, 'vessel_status_laden_known' for laden vessels with known cargo (i.e. a type of cargo that Vortexa currently tracks) or 'any_activity' for any other vessels exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to filter on. exclude_destination : A location ID, or list of location IDs to filter on. Returns TimeSeriesResult Example Time series for the number of vessels available between 0 to 5 days, at port Rotterdam, over 4 days. >>> from vortexasdk import VesselAvailabilityTimeseries, Geographies >>> from datetime import datetime >>> rotterdam = \"68faf65af1345067f11dc6723b8da32f00e304a6f33c000118fccd81947deb4e\" >>> start = datetime(2021, 6, 17) >>> end = datetime(2021, 6, 21) >>> df = (VesselAvailabilityTimeseries().search( ... filter_time_min=start, ... filter_time_max=end, ... filter_port=rotterdam, ... filter_days_to_arrival={\"min\": 0, \"max\": 5}, ... 
).to_df()) Gives the following: key value count 0 2021-06-23 00:00:00+00:00 19225923 224 1 2021-06-24 00:00:00+00:00 19634766 233 2 2021-06-25 00:00:00+00:00 19154857 228 3 2021-06-26 00:00:00+00:00 18410395 225","title":"Time Series"},{"location":"endpoints/vessel_class_breakdown/","text":"vortexasdk.endpoints.vessel_class_breakdown Try me out in your browser: VesselClassBreakdown VesselClassBreakdown(self) search VesselClassBreakdown.search(self, breakdown_unit_average_basis: str = None, filter_activity: str = 'any_activity', breakdown_unit: str = 'b', disable_geographic_exclusion_rules: bool = None, breakdown_size: int = None, filter_time_min: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2019, 10, 1, 1, 0), filter_products: Union[str, List[str]] = None, filter_charterers: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_vessel_flags: Union[str, List[str]] = None, filter_vessel_ice_class: Union[str, List[str]] = None, filter_vessel_propulsion: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_storage_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_waypoints: Union[str, List[str]] = None, exclude_storage_locations: Union[str, List[str]] = None, exclude_ship_to_ship_locations: Union[str, List[str]] = None, exclude_vessel_ice_class: Union[str, List[str]] = None, exclude_vessel_propulsion: Union[str, List[str]] = None, exclude_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None) -> vortexasdk.endpoints.reference_breakdown_result.ReferenceBreakdownResult Vessel class locations breakdown aggregation Arguments breakdown_unit_average_basis : Per day metrics only - movement activity on which to base the average metric. Can be one of state properties of a cargo movement: identified_for_loading_state , loading_state , transiting_state , storing_state , ship_to_ship , unloading_state , unloaded_state , oil_on_water_state , unknown_state , or one of time properties of a cargo movement: identified_for_loading_at , loading_start , loading_end , storing_start , storing_end , ship_to_ship_start , ship_to_ship_end , unloading_start , unloading_end . breakdown_unit : Units to aggregate upon. Must be one of the following: 'b' , 't' , 'cbm' , 'bpd' , 'tpd' , 'mpd' . breakdown_size : Number of top geographies to return. Default is 5. 
disable_geographic_exclusion_rules : A boolean which specifies whether certain movements should be excluded, based on a combination of their origin and destination. filter_activity : Cargo movement activity on which to base the time filter. The endpoint only includes cargo movements matching that match this filter in the aggregations. Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'storing_state', 'storing_start', 'storing_end', 'transiting_state']. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_vessel_flags : A vessel flag ID, or list of vessel flag IDs to filter on. filter_vessel_ice_class : An ice class ID, or list of ice class IDs to filter on. filter_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to filter on. filter_charterers : An commercial entity ID, or list of commercial entity IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_storage_locations : A geography ID, or list of geography IDs to filter on. filter_waypoints : A geography ID, or list of geography IDs to filter on. filter_ship_to_ship_locations : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. exclude_products : A product ID, or list of product IDs to exclude. exclude_vessel_flags : A vessel flag ID, or list of vessel flag IDs to exclude. exclude_vessel_ice_class : An ice class ID, or list of ice class IDs to exclude. exclude_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to exclude. exclude_destinations : A location ID, or list of location IDs to exclude. exclude_origins : A location ID, or list of location IDs to exclude. exclude_storage_locations : A location ID, or list of location IDs to exclude. exclude_waypoints : A location ID, or list of location IDs to exclude. exclude_ship_to_ship_locations : A location ID, or list of location IDs to exclude. exclude_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to exclude. Returns ReferenceBreakdownResult Example Breakdown by vessel class of cargoes in tonnes. >>> from vortexasdk import VesselClassBreakdown >>> start = datetime(2019, 11, 10) >>> end = datetime(2019, 11, 15) >>> df = VesselClassBreakdown().search( ... filter_activity=\"loading_end\", ... breakdown_unit=\"t\", ... breakdown_size=5, ... filter_time_min=start, ... 
filter_time_max=end ... ).to_df() Gives the following: key value count 0 vlcc_plus 94855526 24 1 aframax 776599 17 2 handymax 381359 24 3 handysize 238723 21 4 panamax 118285 18 5 tiny_tanker 118285 18 6 general_purpose 118285 18 5 sgc 118285 18","title":"Vessel Class Breakdown"},{"location":"endpoints/vessels/","text":"vortexasdk.endpoints.vessels Try me out in your browser: Vessels Vessels(self) Vessels endpoint. load_all Vessels.load_all(self) -> vortexasdk.endpoints.vessels_result.VesselsResult Load all vessels. search Vessels.search(self, term: Union[str, List[str]] = None, ids: Union[str, List[str]] = None, vessel_classes: Union[str, List[str]] = None, vessel_product_types: Union[str, List[str]] = None, vessel_scrubbers: str = 'disabled', exact_term_match: bool = False) -> vortexasdk.endpoints.vessels_result.VesselsResult Find all vessels matching given search arguments. Search arguments are combined in an AND manner. Arguments term : The name(s) (or partial name(s)) of a vessel we'd like to search ids : ID or IDs of vessels we'd like to search vessel_classes : vessel_class (or list of vessel classes) we'd like to search. Each vessel class must be one of \"tiny_tanker\" , \"general_purpose\" , \"handysize\" , \"handymax\" , \"panamax\", \"aframax\" , \"suezmax\" , \"vlcc_plus\" , \"sgc\" , \"mgc\" , \"lgc\" , \"vlgc\" . Refer to VortexaAPI Vessel Entities for the most up-to-date list of vessel classes. vessel_product_types : A product ID, or list of product IDs to filter on, searching vessels currently carrying these products. vessel_scrubbers : An optional filter to filter on vessels with or without scrubbers. To disable the filter (the default behaviour), enter 'disabled'. To only include vessels with scrubbers, enter 'inc'. To exclude vessels with scrubbers, enter 'exc'. exact_term_match : Search on only exact term matches, or allow similar matches. e.g. When searching for \"Ocean\" with exact_term_match=False , then the SDK will yield vessels named ['Ocean', 'Ocean Wisdom', ...] etc. When exact_term_match=True , the SDK will only yield the vessel named Ocean . Returns List of vessels matching the search arguments. Examples Let's find all the VLCCs with 'ocean' in their name, or related names. >>> from vortexasdk import Vessels >>> vessels_df = Vessels().search(vessel_classes='vlcc', term='ocean').to_df(columns=['name', 'imo', 'mmsi', 'related_names']) name imo mmsi related_names 0 OCEANIS 9532757 241089000 ['OCEANIS'] 1 AEGEAN 9732553 205761000 ['GENER8 OCEANUS'] 2 OCEANIA 9246633 205753000 ['OCEANIA' 3 ENEOS OCEAN 9662875 432986000 ['ENEOS OCEAN'] 4 OCEAN LILY 9284960 477178100 ['OCEAN LILY'] 5 SHINYO OCEAN 9197868 636019316 ['SHINYO OCEAN'] 6 NASHA 9079107 370497000 ['OCEANIC'] 7 HUMANITY 9180281 422204700 ['OCEAN NYMPH'] Note the term search also looks for vessels with matching related_names Let's find all the vessels currently carrying Crude. >>> from vortexasdk import Vessels, Products >>> crude = [p.id for p in Products().search(term=\"crude\").to_list() if 'group' in p.layer] >>> vessels_df = Vessels().search(vessel_product_types=crude).to_df() Further Documentation VortexaAPI Vessel Reference reference Vessels.reference(self, id: str) -> Dict Perform a vessel lookup. Arguments id : Vessel ID to lookup Returns Vessel record matching the ID Further Documentation: VortexaAPI Vessel Reference vortexasdk.endpoints.vessels_result VesselsResult VesselsResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the Vessels endpoint. 
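As an illustration (the vessel name is an example only), a search result can be kept as Vessel objects or flattened to a DataFrame with chosen columns: >>> from vortexasdk import Vessels >>> result = Vessels().search(term=\"ocean\", exact_term_match=False) >>> vessels = result.to_list() # list of Vessel objects >>> df = result.to_df(columns=['name', 'imo', 'vessel_class']) The two conversion methods are described below. 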
to_list VesselsResult.to_list(self) -> List[vortexasdk.api.vessel.Vessel] Represent vessels as a list. to_df VesselsResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent vessels as a pd.DataFrame . Arguments columns : The vessel features we want in the dataframe. Enter columns='all' to include all features. Defaults to columns = ['id', 'name', 'imo', 'vessel_class'] . Returns pd.DataFrame of vessels.","title":"Vessels"},{"location":"endpoints/voyages_congestion_breakdown/","text":"vortexasdk.endpoints.voyages_congestion_breakdown Try me out in your browser: VoyagesCongestionBreakdown VoyagesCongestionBreakdown(self) Please note: you will require a subscription to our Freight module to access this endpoint. search VoyagesCongestionBreakdown.search(self, breakdown_property: str = 'port', breakdown_size: int = 2000, order: str = None, order_direction: str = None, time_min: datetime.datetime = datetime.datetime(2022, 1, 1, 0, 0), time_max: datetime.datetime = datetime.datetime(2022, 1, 1, 1, 0), voyage_id: Union[str, List[str]] = None, cargo_movement_id: Union[str, List[str]] = None, voyage_status: Union[str, List[str]] = None, voyage_status_excluded: Union[str, List[str]] = None, movement_status: Union[str, List[str]] = None, movement_status_excluded: Union[str, List[str]] = None, cargo_status: Union[str, List[str]] = None, cargo_status_excluded: Union[str, List[str]] = None, location_status: Union[str, List[str]] = None, location_status_excluded: Union[str, List[str]] = None, commitment_status: Union[str, List[str]] = None, commitment_status_excluded: Union[str, List[str]] = None, exclude_overlapping_entries: bool = None, products: Union[str, List[str]] = None, products_excluded: Union[str, List[str]] = None, latest_products: Union[str, List[str]] = None, latest_products_excluded: Union[str, List[str]] = None, charterers: Union[str, List[str]] = None, charterers_excluded: Union[str, List[str]] = None, effective_controllers: Union[str, List[str]] = None, effective_controllers_excluded: Union[str, List[str]] = None, origins: Union[str, List[str]] = None, origins_excluded: Union[str, List[str]] = None, destinations: Union[str, List[str]] = None, destinations_excluded: Union[str, List[str]] = None, locations: Union[str, List[str]] = None, locations_excluded: Union[str, List[str]] = None, vessels: Union[str, List[str]] = None, vessels_excluded: Union[str, List[str]] = None, flags: Union[str, List[str]] = None, flags_excluded: Union[str, List[str]] = None, ice_class: Union[str, List[str]] = None, ice_class_excluded: Union[str, List[str]] = None, vessel_propulsion: Union[str, List[str]] = None, vessel_propulsion_excluded: Union[str, List[str]] = None, vessel_age_min: int = None, vessel_age_max: int = None, vessel_dwt_min: int = None, vessel_dwt_max: int = None, vessel_cbm_min: int = None, vessel_cbm_max: int = None, vessel_wait_time_min: int = None, vessel_wait_time_max: int = None, vessel_scrubbers: str = None, vessels_tags: Union[vortexasdk.api.shared_types.Tag, List[vortexasdk.api.shared_types.Tag]] = None, vessels_tags_excluded: Union[vortexasdk.api.shared_types.Tag, List[vortexasdk.api.shared_types.Tag]] = None, vessel_risk_level: Union[str, List[str]] = None, vessel_risk_level_excluded: Union[str, List[str]] = None, has_ship_to_ship: bool = None, has_charterer: bool = None) -> vortexasdk.endpoints.voyages_congestion_breakdown_result.CongestionBreakdownResult Returns a count of congested voyages aggregated by congestion location. 
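As a minimal sketch of the call pattern (dates and filters are illustrative), the breakdown can also be aggregated by terminal and ordered by average waiting time: >>> from vortexasdk import VoyagesCongestionBreakdown >>> from datetime import datetime >>> df = VoyagesCongestionBreakdown().search( ... time_min=datetime(2022, 4, 26), ... time_max=datetime(2022, 4, 26, 23, 59), ... movement_status=\"congestion\", ... breakdown_property=\"terminal\", ... breakdown_size=5, ... order=\"avg_wait\", ... order_direction=\"desc\", ... ).to_df() The full argument list follows. 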
Arguments breakdown_size : Number of top records to return. breakdown_property : Property to aggregate upon. Can be one of: 'port' , 'shipping_region' , 'terminal' . time_min : The UTC start date of the time filter. time_max : The UTC end date of the time filter. voyage_id : An array of unique voyage ID(s) to filter on. cargo_movement_id : An array of unique cargo movement ID(s) to filter on. voyage_status : A voyage status, or list of voyage statuses to filter on. Can be one of: 'ballast' , 'laden' . voyage_status_excluded : A voyage status, or list of voyage statuses to exclude. movement_status : A movement status, or list of movement statuses to filter on. Can be one of: 'moving' , 'stationary' , 'waiting' , 'congestion' , 'slow' . movement_status_excluded : A movement status, or list of movement statuses to exclude. cargo_status : A cargo status, or list of cargo statuses to filter on. Can be one of: 'in-transit' , 'floating-storage' , 'loading' , 'discharging' . cargo_status_excluded : A cargo status, or list of cargo statuses to exclude. location_status : A location status, or list of location statuses to filter on. Can be one of: 'berth' , 'anchorage-zone' , 'dry-dock' , 'on-the-sea' . location_status_excluded : A location status, or list of location statuses to exclude. commitment_status : A commitment status, or list of commitment statuses to filter on. Can be one of: 'committed' , 'uncommitted' , 'open' , 'unknown' . commitment_status_excluded : A commitment status, or list of commitment statuses to exclude. exclude_overlapping_entries : A boolean to only consider the latest voyage on days where two or more voyages overlap. products : A product ID, or list of product IDs to filter on. products_excluded : A product ID, or list of product IDs to exclude. latest_products : A product ID, or list of product IDs of the latest cargo on board to filter on. latest_products_excluded : A product ID, or list of product IDs of the latest cargo on board to exclude. charterers : A charterer ID, or list of charterer IDs to filter on. charterers_excluded : A charterer ID, or list of charterer IDs to exclude. effective_controllers : A vessel effective controller ID, or list of vessel effective controller IDs to filter on. effective_controllers_excluded : An effective controller ID, or list of effective controller IDs to exclude. origins : An origin ID, or list of origin IDs to filter on. origins_excluded : An origin ID, or list of origin IDs to exclude. destinations : A destination ID, or list of destination IDs to filter on. destinations_excluded : A destination ID, or list of destination IDs to exclude. locations : A location ID, or list of location IDs to filter on. locations_excluded : A location ID, or list of location IDs to exclude. vessels : A vessel ID or vessel class, or list of vessel IDs/vessel classes to filter on. vessels_excluded : A vessel ID or vessel class, or list of vessel IDs/vessel classes to exclude. flags : A flag, or list of flags to filter on. flags_excluded : A flag, or list of flags to exclude. ice_class : An ice class, or list of ice classes to filter on. ice_class_excluded : An ice class, or list of ice classes to exclude. vessel_propulsion : A propulsion method, or list of propulsion methods to filter on. vessel_propulsion_excluded : A propulsion method, or list of propulsion methods to exclude. vessel_age_min : A number between 1 and 100 (representing years). vessel_age_max : A number between 1 and 100 (representing years). 
vessel_dwt_min : A number representing minimum deadweight tonnage of a vessel. vessel_dwt_max : A number representing maximum deadweight tonnage of a vessel. vessel_cbm_min : A number representing minimum cubic capacity of a vessel. vessel_cbm_max : A number representing maximum cubic capacity tonnage of a vessel. vessel_wait_time_min : A number representing a minimum number of days until a vessel becomes available. vessel_wait_time_max : A number representing a maximum number of days until a vessel becomes available. vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. vessels_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. vessels_tags_excluded : A time bound vessel tag, or list of time bound vessel tags to exclude. vessel_risk_level : A vessel risk level, or list of vessel risk levels to filter on. vessel_risk_level_excluded : A vessel risk level, or list of vessel risk levels to exclude. has_ship_to_ship : A boolean to show data where at least one STS transfer occurs. has_charterer : A boolean to show data where at least one charterer is specified. order : Used to sort the returned results. Can be one of: 'location' , 'avg_wait' , 'dwt' , 'capacity' , 'count' . order_direction : Determines the direction of sorting. \u2018asc\u2019 for ascending, \u2018desc\u2019 for descending. Returns CongestionBreakdownResult Example Stats for vessels in congestion on 26th April 2022 split by shipping region. >>> from vortexasdk import VoyagesCongestionBreakdown >>> from datetime import datetime >>> search_result = VoyagesCongestionBreakdown().search( ... time_min=datetime(2022, 4, 26), ... time_max=datetime(2022, 4, 26, 23, 59), ... movement_status=\"congestion\", ... breakdown_property=\"shipping_region\", ... breakdown_size=2, ... 
).to_df() Gives the following result: | | avg_waiting_time | vessel_dwt | vessel_cubic_capacity | vessel_count | cargo_quantity | avg_waiting_time_laden | vessel_dwt_laden | vessel_cubic_capacity_laden | vessel_count_laden | avg_waiting_time_ballast | vessel_dwt_ballast | vessel_cubic_capacity_ballast | vessel_count_ballast | location_details.0.label | |---:|-------------------:|-------------:|------------------------:|---------------:|-----------------:|-------------------------:|-------------------:|------------------------------:|---------------------:|---------------------------:|---------------------:|--------------------------------:|-----------------------:|:---------------------------| | 0 | 0 | 9199789 | 10271697 | 353 | 320829 | 0 | 7104725 | 7943428 | 161 | 0 | 2095064 | 2328269 | 192 | East Asia | | 1 | 0 | 6415240 | 7241430 | 248 | 106209 | 0 | 3392911 | 3815449 | 126 | 0 | 3022329 | 3425981 | 122 | South East Asia |","title":"Congestion Breakdown"},{"location":"endpoints/voyages_search_enriched/","text":"vortexasdk.endpoints.voyages_search_enriched Try me out in your browser: VoyagesSearchEnriched VoyagesSearchEnriched(self) search VoyagesSearchEnriched.search(self, order: str = None, order_direction: str = None, offset: int = None, unit: str = None, columns: Union[str, List[str]] = None, time_min: datetime.datetime = datetime.datetime(2022, 1, 1, 0, 0), time_max: datetime.datetime = datetime.datetime(2022, 1, 1, 1, 0), voyage_id: Union[str, List[str]] = None, cargo_movement_id: Union[str, List[str]] = None, voyage_status: Union[str, List[str]] = None, voyage_status_excluded: Union[str, List[str]] = None, movement_status: Union[str, List[str]] = None, movement_status_excluded: Union[str, List[str]] = None, cargo_status: Union[str, List[str]] = None, cargo_status_excluded: Union[str, List[str]] = None, location_status: Union[str, List[str]] = None, location_status_excluded: Union[str, List[str]] = None, commitment_status: Union[str, List[str]] = None, commitment_status_excluded: Union[str, List[str]] = None, exclude_overlapping_entries: bool = None, products: Union[str, List[str]] = None, products_excluded: Union[str, List[str]] = None, latest_products: Union[str, List[str]] = None, latest_products_excluded: Union[str, List[str]] = None, charterers: Union[str, List[str]] = None, charterers_excluded: Union[str, List[str]] = None, effective_controllers: Union[str, List[str]] = None, effective_controllers_excluded: Union[str, List[str]] = None, origins: Union[str, List[str]] = None, origins_excluded: Union[str, List[str]] = None, destinations: Union[str, List[str]] = None, destinations_excluded: Union[str, List[str]] = None, locations: Union[str, List[str]] = None, locations_excluded: Union[str, List[str]] = None, vessels: Union[str, List[str]] = None, vessels_excluded: Union[str, List[str]] = None, flags: Union[str, List[str]] = None, flags_excluded: Union[str, List[str]] = None, ice_class: Union[str, List[str]] = None, ice_class_excluded: Union[str, List[str]] = None, vessel_propulsion: Union[str, List[str]] = None, vessel_propulsion_excluded: Union[str, List[str]] = None, vessel_age_min: int = None, vessel_age_max: int = None, vessel_dwt_min: int = None, vessel_dwt_max: int = None, vessel_cbm_min: int = None, vessel_cbm_max: int = None, vessel_wait_time_min: int = None, vessel_wait_time_max: int = None, vessel_scrubbers: str = None, vessels_tags: Union[vortexasdk.api.shared_types.Tag, List[vortexasdk.api.shared_types.Tag]] = None, vessels_tags_excluded: 
Union[vortexasdk.api.shared_types.Tag, List[vortexasdk.api.shared_types.Tag]] = None, vessel_risk_level: Union[str, List[str]] = None, vessel_risk_level_excluded: Union[str, List[str]] = None, has_ship_to_ship: str = None, has_charterer: str = None) -> Union[vortexasdk.endpoints.voyages_search_enriched_result.VoyagesSearchEnrichedFlattenedResult, vortexasdk.endpoints.voyages_search_enriched_result.VoyagesSearchEnrichedListResult] Returns one record per voyage, containing a selection of information about the voyage. NOTE: To display results as a list ( to_list() ), please set the columns parameter to None . To display results as dataframe ( to_df() ), please set the columns parameter to all or a list of selected columns. Arguments time_min : The UTC start date of the time filter. time_max : The UTC end date of the time filter. voyage_id : An array of unique voyage ID(s) to filter on. cargo_movement_id : An array of unique cargo movement ID(s) to filter on. voyage_status : A voyage status, or list of voyage statuses to filter on. Can be one of: 'ballast' , 'laden' . voyage_status_excluded : A voyage status, or list of voyage statuses to exclude. movement_status : A movement status, or list of movement statuses to filter on. Can be one of: 'moving' , 'stationary' , 'waiting' , 'congestion' , 'slow' . movement_status_excluded : A movement status, or list of movement statuses to exclude. cargo_status : A cargo status, or list of cargo statuses to filter on. Can be one of: 'in-transit' , 'floating-storage' , 'loading' , 'discharging' . cargo_status_excluded : A cargo status, or list of cargo statuses to exclude. location_status : A location status, or list of location statuses to filter on. Can be one of: 'berth' , 'anchorage-zone' , 'dry-dock' , 'on-the-sea' . location_status_excluded : A location status, or list of location statuses to exclude. commitment_status : A commitment status, or list of commitment statuses to filter on. Can be one of: 'committed' , 'uncommitted' , 'open' , 'unknown' . commitment_status_excluded : A commitment status, or list of commitment statuses to exclude. exclude_overlapping_entries : A boolean to only consider the latest voyage in days where two or more Voyages overlap. products : A product ID, or list of product IDs to filter on. products_excluded : A product ID, or list of product IDs to exclude. latest_products : A product ID, or list of product IDs of the latest cargo on board to filter on. latest_products_excluded : A product ID, or list of product IDs of the latest cargo on board to exclude. charterers : A charterer ID, or list of charterer IDs to filter on. charterers_excluded : A charterer ID, or list of charterer IDs to exclude. effective_controllers : A vessel effective controller ID, or list of vessel effective controller IDs to filter on. effective_controllers_excluded : A effective controller ID, or list of effective controller IDs to exclude. origins : An origin ID, or list of origin IDs to filter on. origins_excluded : An origin ID, or list of origin IDs to exclude. destinations : A destination ID, or list of destination IDs to filter on. destinations_excluded : A destination ID, or list of destination IDs to exclude. locations : A location ID, or list of location IDs to filter on. locations_excluded : A location ID, or list of location IDs to exclude. vessels : A vessel ID or vessel class, or list of vessel IDs/vessel classes to filter on. vessels_excluded : A vessel ID or vessel class, or list of vessel IDs/vessel classes to exclude. 
flags : A flag, or list of flags to filter on. flags_excluded : A flag, or list of flags to exclude. ice_class : An ice class, or list of ice classes to filter on. ice_class_excluded : An ice class, or list of ice classes to exclude. vessel_propulsion : A propulsion method, or list of propulsion methods to filter on. vessel_propulsion_excluded : A propulsion method, or list of propulsion methods to exclude. vessel_age_min : A number between 1 and 100 (representing years). vessel_age_max : A number between 1 and 100 (representing years). vessel_dwt_min : A number representing minimum deadweight tonnage of a vessel. vessel_dwt_max : A number representing maximum deadweight tonnage of a vessel. vessel_cbm_min : A number representing minimum cubic capacity of a vessel. vessel_cbm_max : A number representing maximum cubic capacity of a vessel. vessel_wait_time_min : A number representing a minimum number of days until a vessel becomes available. vessel_wait_time_max : A number representing a maximum number of days until a vessel becomes available. vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. vessels_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. vessels_tags_excluded : A time bound vessel tag, or list of time bound vessel tags to exclude. vessel_risk_level : A vessel risk level, or list of vessel risk levels to filter on. vessel_risk_level_excluded : A vessel risk level, or list of vessel risk levels to exclude. has_ship_to_ship : Filter data where at least one STS transfer occurs, or none. - one of disabled , inc , exc . Passing disabled means the filter is not active. has_charterer : Filter data where at least one charterer is specified, or none. - one of disabled , inc , exc . Passing disabled means the filter is not active. order : Used to sort the returned results. Can be one of: 'vessel_name' , 'dwt' , 'vessel_class' , 'start_date' , 'end_date' . order_direction : Determines the direction of sorting. \u2018asc\u2019 for ascending, \u2018desc\u2019 for descending. offset : Used to page results. The offset from which records should be returned. unit : Unit of measurement. Enter 'b' for barrels or 't' for tonnes or 'cbm' for cubic metres. columns : Determines what columns are visible in the output. Enter \"all\" for all columns, or any of: 'vessel_name' , 'imo' , 'dwt' , 'capacity' , 'vessel_class' , 'voyage_status' , 'cargo_status' , 'origin' , 'origin_shipping_region' , 'origin_region' , 'origin_country' , 'origin_port' , 'origin_terminal' , 'destination' , 'destination_shipping_region' , 'destination_region' , 'destination_country' , 'destination_port' , 'destination_terminal' , 'destination_eta' , 'charterer' , 'effective_controller' , 'voyage_type' , 'quantity' , 'latest_product' , 'latest_product_group' , 'latest_product_category' , 'latest_product_grade' , 'time_charterer' , 'flag' , 'scrubber' , 'build_year' , 'risk_rating' , 'coating' , 'start_date' , 'end_date' , 'tonne_miles' , 'distance' , 'voyage_id' , 'previous_voyage_id' , 'next_voyage_id' , 'duration' , 'first_origin' , 'first_origin_shipping_region' , 'first_origin_country' , 'first_origin_port' , 'first_origin_terminal' , 'final_destination' , 'final_destination_shipping_region' , 'final_destination_country' , 'final_destination_port' , 'final_destination_terminal' . Returns VoyagesSearchEnrichedListResult or VoyagesSearchEnrichedFlattenedResult Example Voyages as of 13th Feb 2023 for vessels carrying crude departing from Rotterdam. 
>>> from vortexasdk import VoyagesSearchEnriched, Geographies >>> from datetime import datetime >>> rotterdam = [g.id for g in Geographies().search(\"rotterdam\").to_list() if \"port\" in g.layer] >>> start = datetime(2023, 2, 13) >>> end = datetime(2023, 2, 13, 23, 59) >>> search_result = VoyagesSearchEnriched().search( ... time_min=start, ... time_max=end, ... origins=rotterdam, ... columns=\"all\", ... ).to_df().head() Gives the following result: | | VESSEL NAME | IMO | DWT (t) | CAPACITY (cbm) | VESSEL CLASS | VOYAGE STATUS | ORIGIN | ORIGIN TERMINAL | ORIGIN PORT | ORIGIN COUNTRY | ORIGIN SHIPPING REGION | DESTINATION | DESTINATION TERMINAL | DESTINATION PORT | DESTINATION COUNTRY | DESTINATION SHIPPING REGION | START DATE | END DATE | LATEST PRODUCT | LATEST PRODUCT GROUP | LATEST PRODUCT CATEGORY | LATEST PRODUCT GRADE | QUANTITY (bbl) | CHARTERER | EFFECTIVE CONTROLLER | TIME CHARTERER | BUILD YEAR | FLAG | RISK RATING | SCRUBBERS | COATING | TONNE-MILES | DURATION (h) | DISTANCE | VOYAGE ID | PREVIOUS VOYAGE ID | NEXT VOYAGE ID | ORIGIN GEOGRAPHIC REGION | DESTINATION GEOGRAPHIC REGION | FIRST ORIGIN | FIRST ORIGIN TERMINAL | FIRST ORIGIN PORT | FIRST ORIGIN COUNTRY | FIRST ORIGIN SHIPPING REGION | FINAL DESTINATION | FINAL DESTINATION TERMINAL | FINAL DESTINATION PORT | FINAL DESTINATION COUNTRY | FINAL DESTINATION SHIPPING REGION | |---:|:---------------------|--------:|----------:|-----------------:|:----------------|:----------------|:--------------------------------|:------------------------|:--------------------------------|:--------------------|:-------------------------|:---------------|:-----------------------|:-------------------|:----------------------|:------------------------------|:-------------|:------------|:--------------------------------|:-------------------------|:------------------------------|:-----------------------|-----------------:|:------------|:-----------------------|:-----------------|-------------:|:-------|:--------------|:------------|:----------|:--------------|---------------:|:-----------|:-----------------------------------------------------------------|:-----------------------------------------------------------------|:-----------------|:---------------------------|:--------------------------------|:------------------|:------------------------|:--------------------|:-----------------------|:-------------------------------|:--------------------|:-----------------------------|:-------------------------|:----------------------------|:------------------------------------| | 0 | YM NEPTUNE | 9464106 | 6970 | 8091 | Tiny tanker | Laden | Frontignan [FR], Rotterdam [NL] | , Vopak Terminal Botlek | Frontignan [FR], Rotterdam [NL] | France, Netherlands | West Med, UK Cont | | | | | | 28 Dec 2022 | | Biodiesel, Other Clean Products | Clean Petroleum Products | Finished Biodiesel, Chemicals | | 19058 | | | | 2009 | MT | Low | | Coated | 15708783 | 1139 | 7570 | 0edb92ac9addd29b42ede8a8b4843ceb0edb92ac9addd29b42ede8a8b4843ceb | f2b314f245a391ee4e1ebcc41d9a2d2741526f0f3625183440e7e280092cfe91 | | Europe, Europe | | Frontignan [FR] | | Frontignan [FR] | France | West Med | | | | | | | 1 | YASA POLARIS | 9907457 | 157300 | 167231 | Suezmax | Ballast | Rotterdam [NL] | TEAM Terminal B.V. 
| Rotterdam [NL] | Netherlands | UK Cont | Rotterdam [NL] | | Rotterdam [NL] | Netherlands | UK Cont | 13 Feb 2023 | | Crude | Crude/Condensates | Light-Sour | CPC Blend | 0 | | Ya-Sa Shipping | | 2021 | MH | Medium | | | | 14 | | ac6c49388567f546d4f57a3e8e8c984e2188f68407394bbe3fde99a0aaff72d7 | f9cab95f35202ab0b273d6d646202080f9cab95f35202ab0b273d6d646202080 | | Europe | Europe | Rotterdam [NL] | TEAM Terminal B.V. | Rotterdam [NL] | Netherlands | UK Cont | Rotterdam [NL] | | Rotterdam [NL] | Netherlands | UK Cont | | 2 | XING HAI WAN | 9570113 | 6123 | 6022 | Tiny tanker | Laden | Rotterdam [NL] | Shell - Rotterdam | Rotterdam [NL] | Netherlands | UK Cont | | | | | | 07 Feb 2023 | | Asphalt/Bitumen | Dirty Petroleum Products | | | 18513 | | | | 2009 | PA | Low | | | 3848284 | 144 | 1257 | 2bb322f03f203bf2570654e6dc22c52e2bb322f03f203bf2570654e6dc22c52e | 2cec64d13c15f4e8999068c63a898335a75bc99b600f97768655ae748b75a2d7 | | Europe | | Rotterdam [NL] | Shell - Rotterdam | Rotterdam [NL] | Netherlands | UK Cont | | | | | | | 3 | XANTHIA | 9246152 | 17031 | 17829 | General purpose | Laden | Rotterdam [NL] | Vopak Terminal Botlek | Rotterdam [NL] | Netherlands | UK Cont | Amsterdam [NL] | | Amsterdam [NL] | Netherlands | UK Cont | 12 Feb 2023 | 15 Feb 2023 | Diesel/Gasoil | Clean Petroleum Products | Gasoil | | 43370 | | Neda Maritime | | 2003 | NO | Low | | Coated | 8334 | 85 | 1 | 640a7b6ae43683ef52bdc5141b5b11a7640a7b6ae43683ef52bdc5141b5b11a7 | 3a7353f9128d669f31e9d775ef53d9355d34928f1a77801da59576d523cb95c5 | | Europe | Europe | Rotterdam [NL] | Vopak Terminal Botlek | Rotterdam [NL] | Netherlands | UK Cont | Amsterdam [NL] | | Amsterdam [NL] | Netherlands | UK Cont | | 4 | WOODSIDE REES WITHER | 9810367 | 96000 | 173400 | Conventional | Ballast | Gate LNG Terminal | Gate LNG Terminal | Rotterdam [NL] | Netherlands | UK Cont | | | | | | 01 Feb 2023 | | LNG | Liquefied Natural Gas | Lean | Corpus Christi LNG | 0 | | MARAN GAS MARITIME | | 2019 | GR | Low | | | | 280 | 3967 | 0fa825ab44e6dc5d352db9e8ef47f41e003a794b97b69677ba5f64b2398456e3 | d51d7fc4c74ed04ec69646d297c2f19cd51d7fc4c74ed04ec69646d297c2f19c | | Europe | | Gate LNG Terminal | Gate LNG Terminal | Rotterdam [NL] | Netherlands | UK Cont | | | | | |","title":"Search Enriched"},{"location":"endpoints/voyages_timeseries/","text":"vortexasdk.endpoints.voyages_timeseries Try me out in your browser: VoyagesTimeseries VoyagesTimeseries(self) Please note: you will require a subscription to our Freight module to access this endpoint. 
search VoyagesTimeseries.search(self, breakdown_frequency: str = None, breakdown_property: str = None, breakdown_split_property: str = None, breakdown_unit_operator: str = None, time_min: datetime.datetime = datetime.datetime(2022, 1, 1, 0, 0), time_max: datetime.datetime = datetime.datetime(2022, 1, 1, 1, 0), voyage_id: Union[str, List[str]] = None, cargo_movement_id: Union[str, List[str]] = None, voyage_status: Union[str, List[str]] = None, voyage_status_excluded: Union[str, List[str]] = None, movement_status: Union[str, List[str]] = None, movement_status_excluded: Union[str, List[str]] = None, cargo_status: Union[str, List[str]] = None, cargo_status_excluded: Union[str, List[str]] = None, location_status: Union[str, List[str]] = None, location_status_excluded: Union[str, List[str]] = None, commitment_status: Union[str, List[str]] = None, commitment_status_excluded: Union[str, List[str]] = None, exclude_overlapping_entries: bool = None, products: Union[str, List[str]] = None, products_excluded: Union[str, List[str]] = None, latest_products: Union[str, List[str]] = None, latest_products_excluded: Union[str, List[str]] = None, charterers: Union[str, List[str]] = None, charterers_excluded: Union[str, List[str]] = None, effective_controllers: Union[str, List[str]] = None, effective_controllers_excluded: Union[str, List[str]] = None, origins: Union[str, List[str]] = None, origins_excluded: Union[str, List[str]] = None, destinations: Union[str, List[str]] = None, destinations_excluded: Union[str, List[str]] = None, locations: Union[str, List[str]] = None, locations_excluded: Union[str, List[str]] = None, congestion_target_location: Union[str, List[str]] = None, congestion_target_location_excluded: Union[str, List[str]] = None, vessels: Union[str, List[str]] = None, vessels_excluded: Union[str, List[str]] = None, flags: Union[str, List[str]] = None, flags_excluded: Union[str, List[str]] = None, ice_class: Union[str, List[str]] = None, ice_class_excluded: Union[str, List[str]] = None, vessel_propulsion: Union[str, List[str]] = None, vessel_propulsion_excluded: Union[str, List[str]] = None, vessel_age_min: int = None, vessel_age_max: int = None, vessel_dwt_min: int = None, vessel_dwt_max: int = None, vessel_cbm_min: int = None, vessel_cbm_max: int = None, vessel_wait_time_min: int = None, vessel_wait_time_max: int = None, vessel_scrubbers: str = None, vessels_tags: Union[vortexasdk.api.shared_types.Tag, List[vortexasdk.api.shared_types.Tag]] = None, vessels_tags_excluded: Union[vortexasdk.api.shared_types.Tag, List[vortexasdk.api.shared_types.Tag]] = None, vessel_risk_level: Union[str, List[str]] = None, vessel_risk_level_excluded: Union[str, List[str]] = None, has_ship_to_ship: bool = None, has_charterer: bool = None) -> vortexasdk.endpoints.breakdown_result.BreakdownResult Returns a count of voyages per record for the requested date period Arguments breakdown_frequency : Frequency denoting the granularity of the time series. Must be one of the following: 'day' , 'week' , 'doe_week' , 'month' , 'quarter' , 'year' . breakdown_property : Property to aggregate upon. Can be one of: 'vessel_count' , 'utilisation' , 'cargo_quantity' , 'avg_wait_time' , 'dwt' , 'cubic_capacity' , 'tonne_miles' , 'avg_distance' , 'avg_speed' . breakdown_split_property : Property to split results by. 
Can be one of: 'vessel_status' , 'vessel_class' , 'vessel_flag' , 'fixture_status' , 'origin_region' , 'origin_shipping_region' , 'origin_trading_region' , 'origin_trading_sub_region' , 'origin_trading_block' , 'origin_country' , 'origin_port' , 'origin_terminal' , 'destination_region' , 'destination_shipping_region' , 'destination_trading_region' , 'destination_trading_sub_region' , 'destination_trading_block' , 'destination_country' , 'destination_port' , 'destination_terminal' , 'location_port' , 'location_country' , 'location_shipping_region' , 'congestion_location_port' , 'congestion_location_country' , 'congestion_location_shipping_region' , 'product_group' , 'product_group_product' , 'product_category' , 'product_grade' , 'charterer' , 'effective_controller' , 'none' or not provided. breakdown_unit_operator : Denotes the type of the aggregation calculation. Can be one of 'sum' or 'avg' . time_min : The UTC start date of the time filter. time_max : The UTC end date of the time filter. voyage_id : An array of unique voyage ID(s) to filter on. cargo_movement_id : An array of unique cargo movement ID(s) to filter on. voyage_status : A voyage status, or list of voyage statuses to filter on. Can be one of: 'ballast' , 'laden' . voyage_status_excluded : A voyage status, or list of voyage statuses to exclude. movement_status : A movement status, or list of movement statuses to filter on. Can be one of: 'moving' , 'stationary' , 'waiting' , 'congestion' , 'slow' . movement_status_excluded : A movement status, or list of movement statuses to exclude. cargo_status : A cargo status, or list of cargo statuses to filter on. Can be one of: 'in-transit' , 'floating-storage' , 'loading' , 'discharging' . cargo_status_excluded : A cargo status, or list of cargo statuses to exclude. location_status : A location status, or list of location statuses to filter on. Can be one of: 'berth' , 'anchorage-zone' , 'dry-dock' , 'on-the-sea' . location_status_excluded : A location status, or list of location statuses to exclude. commitment_status : A commitment status, or list of commitment statuses to filter on. Can be one of: 'committed' , 'uncommitted' , 'open' , 'unknown' . commitment_status_excluded : A commitment status, or list of commitment statuses to exclude. exclude_overlapping_entries : A boolean to only consider the latest voyage in days where two or more Voyages overlap. products : A product ID, or list of product IDs to filter on. products_excluded : A product ID, or list of product IDs to exclude. latest_products : A product ID, or list of product IDs of the latest cargo on board to filter on. latest_products_excluded : A product ID, or list of product IDs of the latest cargo on board to exclude. charterers : A charterer ID, or list of charterer IDs to filter on. charterers_excluded : A charterer ID, or list of charterer IDs to exclude. effective_controllers : A vessel effective controller ID, or list of vessel effective controller IDs to filter on. effective_controllers_excluded : A effective controller ID, or list of effective controller IDs to exclude. origins : An origin ID, or list of origin IDs to filter on. origins_excluded : An origin ID, or list of origin IDs to exclude. destinations : A destination ID, or list of destination IDs to filter on. destinations_excluded : A destination ID, or list of destination IDs to exclude. locations : A location ID, or list of location IDs to filter on. locations_excluded : A location ID, or list of location IDs to exclude. 
congestion_target_location : A congestion location ID, or list of congestion location IDs to filter on. congestion_target_location_excluded : A congestion location ID, or list of congestion location IDs to exclude. vessels : A vessel ID or vessel class, or list of vessel IDs/vessel classes to filter on. vessels_excluded : A vessel ID or vessel class, or list of vessel IDs/vessel classes to exclude. flags : A flag, or list of flags to filter on. flags_excluded : A flag, or list of flags to exclude. ice_class : An ice class, or list of ice classes to filter on. ice_class_excluded : An ice class, or list of ice classes to exclude. vessel_propulsion : A propulsion method, or list of propulsion methods to filter on. vessel_propulsion_excluded : A propulsion method, or list of propulsion methods to exclude. vessel_age_min : A number between 1 and 100 (representing years). vessel_age_max : A number between 1 and 100 (representing years). vessel_dwt_min : A number representing minimum deadweight tonnage of a vessel. vessel_dwt_max : A number representing maximum deadweight tonnage of a vessel. vessel_cbm_min : A number representing minimum cubic capacity of a vessel. vessel_cbm_max : A number representing maximum cubic capacity of a vessel. vessel_wait_time_min : A number representing a minimum number of days until a vessel becomes available. vessel_wait_time_max : A number representing a maximum number of days until a vessel becomes available. vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. vessels_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. vessels_tags_excluded : A time bound vessel tag, or list of time bound vessel tags to exclude. vessel_risk_level : A vessel risk level, or list of vessel risk levels to filter on. vessel_risk_level_excluded : A vessel risk level, or list of vessel risk levels to exclude. has_ship_to_ship : A boolean to show data where at least one STS transfer occurs. has_charterer : A boolean to show data where at least one charterer is specified. Returns BreakdownResult Example Sum of vessels departing from Rotterdam between 26th-28th April 2022, split by location country. >>> from vortexasdk import VoyagesTimeseries, Geographies >>> from datetime import datetime >>> rotterdam = [g.id for g in Geographies().search(\"rotterdam\").to_list() if \"port\" in g.layer] >>> search_result = VoyagesTimeseries().search( ... origins=rotterdam, ... time_min=datetime(2022, 4, 26), ... time_max=datetime(2022, 4, 28, 23, 59), ... breakdown_frequency=\"day\", ... breakdown_property=\"vessel_count\", ... breakdown_split_property=\"location_country\", ... ).to_df() Gives the following result: | | key | value | count | breakdown.0.label | breakdown.0.count | breakdown.0.value | |---:|:--------------------------|--------:|--------:|:--------------------|--------------------:|--------------------:| | 0 | 2022-04-26 00:00:00+00:00 | 294 | 294 | Netherlands | 85 | 85 | | 1 | 2022-04-27 00:00:00+00:00 | 281 | 281 | Netherlands | 82 | 82 | | 2 | 2022-04-28 00:00:00+00:00 | 279 | 279 | Netherlands | 85 | 85 |","title":"Time Series"},{"location":"endpoints/voyages_top_hits/","text":"vortexasdk.endpoints.voyages_top_hits Try me out in your browser: VoyagesTopHits VoyagesTopHits(self) Please note: you will require a subscription to our Freight module to access this endpoint. 
search VoyagesTopHits.search(self, breakdown_property: str = None, breakdown_split_property: str = None, breakdown_size: int = None, time_min: datetime.datetime = datetime.datetime(2022, 1, 1, 0, 0), time_max: datetime.datetime = datetime.datetime(2022, 1, 1, 1, 0), voyage_id: Union[str, List[str]] = None, cargo_movement_id: Union[str, List[str]] = None, voyage_status: Union[str, List[str]] = None, voyage_status_excluded: Union[str, List[str]] = None, movement_status: Union[str, List[str]] = None, movement_status_excluded: Union[str, List[str]] = None, cargo_status: Union[str, List[str]] = None, cargo_status_excluded: Union[str, List[str]] = None, location_status: Union[str, List[str]] = None, location_status_excluded: Union[str, List[str]] = None, commitment_status: Union[str, List[str]] = None, commitment_status_excluded: Union[str, List[str]] = None, exclude_overlapping_entries: bool = None, products: Union[str, List[str]] = None, products_excluded: Union[str, List[str]] = None, latest_products: Union[str, List[str]] = None, latest_products_excluded: Union[str, List[str]] = None, charterers: Union[str, List[str]] = None, charterers_excluded: Union[str, List[str]] = None, effective_controllers: Union[str, List[str]] = None, effective_controllers_excluded: Union[str, List[str]] = None, origins: Union[str, List[str]] = None, origins_excluded: Union[str, List[str]] = None, destinations: Union[str, List[str]] = None, destinations_excluded: Union[str, List[str]] = None, locations: Union[str, List[str]] = None, locations_excluded: Union[str, List[str]] = None, congestion_target_location: Union[str, List[str]] = None, congestion_target_location_excluded: Union[str, List[str]] = None, vessels: Union[str, List[str]] = None, vessels_excluded: Union[str, List[str]] = None, flags: Union[str, List[str]] = None, flags_excluded: Union[str, List[str]] = None, ice_class: Union[str, List[str]] = None, ice_class_excluded: Union[str, List[str]] = None, vessel_propulsion: Union[str, List[str]] = None, vessel_propulsion_excluded: Union[str, List[str]] = None, vessel_age_min: int = None, vessel_age_max: int = None, vessel_dwt_min: int = None, vessel_dwt_max: int = None, vessel_cbm_min: int = None, vessel_cbm_max: int = None, vessel_wait_time_min: int = None, vessel_wait_time_max: int = None, vessel_scrubbers: str = None, vessels_tags: Union[vortexasdk.api.shared_types.Tag, List[vortexasdk.api.shared_types.Tag]] = None, vessels_tags_excluded: Union[vortexasdk.api.shared_types.Tag, List[vortexasdk.api.shared_types.Tag]] = None, vessel_risk_level: Union[str, List[str]] = None, vessel_risk_level_excluded: Union[str, List[str]] = None, has_ship_to_ship: bool = None, has_charterer: bool = None) -> vortexasdk.endpoints.aggregation_breakdown_result.AggregationBreakdownResult Returns a count of voyages aggregated by a chosen breakdown_property . Arguments breakdown_size : Number of top records to return. breakdown_property : Property to aggregate upon. Must be vessel_count or not provided. breakdown_split_property : Property to split results by. 
Can be one of: 'vessel_status' , 'vessel_class' , 'vessel_flag' , 'fixture_status' , 'origin_region' , 'origin_shipping_region' , 'origin_trading_region' , 'origin_trading_sub_region' , 'origin_trading_block' , 'origin_country' , 'origin_port' , 'origin_terminal' , 'destination_region' , 'destination_shipping_region' , 'destination_trading_region' , 'destination_trading_sub_region' , 'destination_trading_block' , 'destination_country' , 'destination_port' , 'destination_terminal' , 'location_port' , 'location_country' , 'location_shipping_region' , 'congestion_location_port' , 'congestion_location_country' , 'congestion_location_shipping_region' , 'product_group' , 'product_group_product' , 'product_category' , 'product_grade' . time_min : The UTC start date of the time filter. time_max : The UTC end date of the time filter. voyage_id : An array of unique voyage ID(s) to filter on. cargo_movement_id : An array of unique cargo movement ID(s) to filter on. voyage_status : A voyage status, or list of voyage statuses to filter on. Can be one of: 'ballast' , 'laden' . voyage_status_excluded : A voyage status, or list of voyage statuses to exclude. movement_status : A movement status, or list of movement statuses to filter on. Can be one of: 'moving' , 'stationary' , 'waiting' , 'congestion' , 'slow' . movement_status_excluded : A movement status, or list of movement statuses to exclude. cargo_status : A cargo status, or list of cargo statuses to filter on. Can be one of: 'in-transit' , 'floating-storage' , 'loading' , 'discharging' . cargo_status_excluded : A cargo status, or list of cargo statuses to exclude. location_status : A location status, or list of location statuses to filter on. Can be one of: 'berth' , 'anchorage-zone' , 'dry-dock' , 'on-the-sea' . location_status_excluded : A location status, or list of location statuses to exclude. commitment_status : A commitment status, or list of commitment statuses to filter on. Can be one of: 'committed' , 'uncommitted' , 'open' , 'unknown' . commitment_status_excluded : A commitment status, or list of commitment statuses to exclude. exclude_overlapping_entries : A boolean to only consider the latest voyage in days where two or more Voyages overlap. products : A product ID, or list of product IDs to filter on. products_excluded : A product ID, or list of product IDs to exclude. latest_products : A product ID, or list of product IDs of the latest cargo on board to filter on. latest_products_excluded : A product ID, or list of product IDs of the latest cargo on board to exclude. charterers : A charterer ID, or list of charterer IDs to filter on. charterers_excluded : A charterer ID, or list of charterer IDs to exclude. effective_controllers : A vessel effective controller ID, or list of vessel effective controller IDs to filter on. effective_controllers_excluded : A vessel effective controller ID, or list of vessel effective controller IDs to exclude. origins : An origin ID, or list of origin IDs to filter on. origins_excluded : An origin ID, or list of origin IDs to exclude. destinations : A destination ID, or list of destination IDs to filter on. destinations_excluded : A destination ID, or list of destination IDs to exclude. locations : A location ID, or list of location IDs to filter on. locations_excluded : A location ID, or list of location IDs to exclude. congestion_target_location : A congestion location ID, or list of congestion location IDs to filter on. 
congestion_target_location_excluded : A congestion location ID, or list of congestion location IDs to exclude. vessels : A vessel ID or vessel class, or list of vessel IDs/vessel classes to filter on. vessels_excluded : A vessel ID or vessel class, or list of vessel IDs/vessel classes to exclude. flags : A flag, or list of flags to filter on. flags_excluded : A flag, or list of flags to exclude. ice_class : An ice class, or list of ice classes to filter on. ice_class_excluded : An ice class, or list of ice classes to exclude. vessel_propulsion : A propulsion method, or list of propulsion methods to filter on. vessel_propulsion_excluded : A propulsion method, or list of propulsion methods to exclude. vessel_age_min : A number between 1 and 100 (representing years). vessel_age_max : A number between 1 and 100 (representing years). vessel_dwt_min : A number representing minimum deadweight tonnage of a vessel. vessel_dwt_max : A number representing maximum deadweight tonnage of a vessel. vessel_cbm_min : A number representing minimum cubic capacity of a vessel. vessel_cbm_max : A number representing maximum cubic capacity of a vessel. vessel_wait_time_min : A number representing a minimum number of days until a vessel becomes available. vessel_wait_time_max : A number representing a maximum number of days until a vessel becomes available. vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. vessels_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. vessels_tags_excluded : A time bound vessel tag, or list of time bound vessel tags to exclude. vessel_risk_level : A vessel risk level, or list of vessel risk levels to filter on. vessel_risk_level_excluded : A vessel risk level, or list of vessel risk levels to exclude. has_ship_to_ship : A boolean to show data where at least one STS transfer occurs. has_charterer : A boolean to show data where at least one charterer is specified. Returns AggregationBreakdownResult Example Top origin countries for crude imports to Rotterdam on 1st August 2021. >>> from vortexasdk import VoyagesTopHits, Geographies, Products >>> from datetime import datetime >>> rotterdam = [g.id for g in Geographies().search(\"rotterdam\").to_list() if \"port\" in g.layer] >>> crude = [p.id for p in Products().search(\"crude\").to_list() if \"Crude\" == p.name] >>> start = datetime(2021, 8, 1) >>> end = datetime(2021, 8, 1, 23, 59) >>> search_result = VoyagesTopHits().search( ... time_min=start, ... time_max=end, ... destinations=rotterdam, ... products=crude, ... breakdown_size=5, ... breakdown_split_property=\"origin_country\" ... ).to_list() Gives the following result: [ AggregationBreakdownItem( id='b996521be9c996db', count=8, value=8.0, label='Russia' ), AggregationBreakdownItem( id='2d92cc08f22524db', count=7, value=7.0, label='United States' ), AggregationBreakdownItem( id='2aaad41b89dfad19', count=4, value=4.0, label='United Kingdom' ), AggregationBreakdownItem( id='430f0e467f3a408f', count=2, value=2.0, label='Nigeria' ), AggregationBreakdownItem( id='3eac69e760d9ec57', count=1, value=1.0, label='Egypt' ) ]","title":"Top Hits"},{"location":"entities/breakdown_item/","text":"vortexasdk.api.breakdown_item BreakdownItem BreakdownItem(__pydantic_self__, **data: Any) -> None Generic container class holding a key <> value pair, a count , and optionally a label and a breakdown of records contributing to the given value. 
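As a rough illustration, a BreakdownItem can be consumed like this (a sketch only, assuming the usual to_list() accessor on the BreakdownResult returned by the time series endpoints above, and reusing the Rotterdam query from the Time Series example): >>> from vortexasdk import VoyagesTimeseries, Geographies >>> from datetime import datetime >>> rotterdam = [g.id for g in Geographies().search(\"rotterdam\").to_list() if \"port\" in g.layer] >>> items = VoyagesTimeseries().search( ... origins=rotterdam, ... time_min=datetime(2022, 4, 26), ... time_max=datetime(2022, 4, 28, 23, 59), ... breakdown_frequency=\"day\", ... breakdown_property=\"vessel_count\", ... ).to_list() >>> for item in items: ... print(item.key, item.value, item.count) # date bucket, aggregated value, number of contributing records 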
For example, this class could hold the average speed of vessels ( value ) on 2019-01-01 ( key ), the number of vessels contributing to this average (count) and additional information about the aggregation ( breakdown ). If the BreakdownItem is enriched by reference data (e.g. in fleet-utilisation/breakdown/origin ), key is the ID of the reference entity, label holds its name and value and count correspond to numeric values of the returned record.","title":"Breakdown Item"},{"location":"entities/cargo_movement/","text":"vortexasdk.api.cargo_movement ParentID ParentID(__pydantic_self__, **data: Any) -> None cargo_movement_id may change under certain conditions. ParentID contains an id , a previous id of the cargo movement, and a splinter_timestamp , the time at which the id change occurred. Cargo Movement Further Documentation CargoMovement CargoMovement(__pydantic_self__, **data: Any) -> None Cargo movements are the base data set the Vortexa API is centred around. Each movement represents a journey of a certain quantity of a product between places. Cargo Movement Further Documentation","title":"Cargo Movement"},{"location":"entities/corporation/","text":"vortexasdk.api.corporation Corporation Corporation(__pydantic_self__, **data: Any) -> None Represent a Corporation reference record returned by the API. CorporateEntity CorporateEntity(__pydantic_self__, **data: Any) -> None Represents a relationship between a corporation and another entity like a vessel. Corporate Entity Further Documentation","title":"Corporation"},{"location":"entities/geography/","text":"vortexasdk.api.geography BoundingBox BoundingBox(__pydantic_self__, **data: Any) -> None Polygon with list of bounding lon lat coords. Geography Geography(__pydantic_self__, **data: Any) -> None Represent a Geography reference record returned by the API. GeographyEntity GeographyEntity(__pydantic_self__, **data: Any) -> None Represents a hierarchy tree of locational data. Geography Entities Further Documentation","title":"Geography"},{"location":"entities/product/","text":"vortexasdk.api.product Product Product(__pydantic_self__, **data: Any) -> None Represent a Product reference record returned by the API. Product Further Documentation ProductEntityWithSingleLayer ProductEntityWithSingleLayer(__pydantic_self__, **data: Any) -> None Represents a single product layer of a hierarchical product tree. Further Documentation ProductEntityWithListLayer ProductEntityWithListLayer(__pydantic_self__, **data: Any) -> None Represents a single product layer of a hierarchical product tree. Further Documentation","title":"Product"},{"location":"entities/timeseries_item/","text":"vortexasdk.api.timeseries_item TimeSeriesItem TimeSeriesItem(__pydantic_self__, **data: Any) -> None Generic container class holding a key <> value pair, a count of records contributing to the given value. For example, this class could hold the total tonnage exported (value) on 2019-01-01 (key), and the count of cargo movements contributing to this tonnage aggregate, ie the number of cargo movements on this day (count).","title":"Time Series Item"},{"location":"entities/vessel/","text":"vortexasdk.api.vessel Vessel Vessel(__pydantic_self__, **data: Any) -> None Represent a Vessel reference record returned by the API. Vessels Further Documentation VesselEntity VesselEntity(__pydantic_self__, **data: Any) -> None A VesselEntity represents a vessel record used in CargoMovements. 
Vessel Entities Further Documentation","title":"Vessel"},{"location":"entities/vessel_availability/","text":"vortexasdk.api.vessel_availability DeclaredDestination DeclaredDestination(__pydantic_self__, **data: Any) -> None Current destination location, as reported by the available vessel VesselFixtures VesselFixtures(__pydantic_self__, **data: Any) -> None Current fixture information for the available vessel VesselAvailability VesselAvailability(__pydantic_self__, **data: Any) -> None Vessel Availability shows vessels that are available to load a given cargo at a given port within a specified time range.","title":"Vessel Availability"},{"location":"entities/voyages/","text":"vortexasdk.api.voyages CongestionBreakdownItem CongestionBreakdownItem(__pydantic_self__, **data: Any) -> None Congestion breakdown shows various stats of vessels in congestion. VoyagesVesselEntity VoyagesVesselEntity(__pydantic_self__, **data: Any) -> None A VoyagesVesselEntity represents a vessel record used in Voyages. Vessel Entities Further Documentation VoyageVesselEvent VoyageVesselEvent(__pydantic_self__, **data: Any) -> None A vessel event represents an activity that a vessel has performed during a voyage Voyage Events Further Documentation VoyageCargoEvent VoyageCargoEvent(__pydantic_self__, **data: Any) -> None Cargo events relate to the movement of cargo during the voyage. Voyage Events Further Documentation VoyageStatusEvent VoyageStatusEvent(__pydantic_self__, **data: Any) -> None Status events describe the status of the voyage at a given period. Voyage Events Further Documentation VoyageEnrichedItem VoyageEnrichedItem(__pydantic_self__, **data: Any) -> None A voyage is defined as a continuous period of time when the vessel is either laden or ballast. Each voyage is made up of multiple voyage events which describe the activity of the vessel while it is laden or ballast. Voyages Further Documentation","title":"Voyages"},{"location":"examples/0_sample_load_cargo_movements/","text":"Simple example to retrieving some sample cargo movements in a dataframe. The below script returns something similar to: events.cargo_port_unload_event.0.start_timestamp product.group.label product.grade.label quantity vessels.0.name 0 2019-10-08T00:41:00+0000 Crude Djeno 123457 AROME 1 2019-11-08T00:41:52+0000 Crude Arab Medium 99898 SCOOBYDOO 2 2019-09-30T23:49:41+0000 Crude Arab Heavy 9879878 DAVID 3 2019-12-01T01:40:00+0000 Crude Usan 999999 DUCK from datetime import datetime from vortexasdk import CargoMovements if __name__ == \"__main__\": # Query API to find all vessels that were loading on the 1st of Aug 2019 search_result = CargoMovements().search( filter_activity=\"loading_start\", filter_time_min=datetime(2019, 8, 1), filter_time_max=datetime(2019, 8, 2), ) print(\"Cargo movements successfully loaded\") # Convert search result to dataframe df = search_result.to_df() print(df.head())","title":"0 Simple Load Cargo Movements"},{"location":"examples/1_china/","text":"Let's retrieve all the VLCCs that have discharged into China in the last 3 months. 
The below script returns: events.cargo_port_unload_event.0.start_timestamp product.group.label product.grade.label quantity vessels.0.name 0 2019-10-08T00:41:00+0000 Crude Djeno 123457 AROME 1 2019-11-08T00:41:52+0000 Crude Arab Medium 99898 SCOOBYDOO 2 2019-09-30T23:49:41+0000 Crude Arab Heavy 9879878 DAVID 3 2019-12-01T01:40:00+0000 Crude Usan 999999 DUCK from datetime import datetime from vortexasdk import CargoMovements, Geographies, Vessels if __name__ == \"__main__\": # Find china ID china = Geographies().search(term=\"China\", exact_term_match=True).to_list()[0].id # Find the ID of all VLCCs vlccs = [ v.id for v in Vessels().search(vessel_classes=\"vlcc_plus\").to_list() ] # Query API search_result = CargoMovements().search( filter_activity=\"loading_start\", filter_vessels=vlccs, filter_destinations=china, filter_time_min=datetime(2019, 9, 29), filter_time_max=datetime(2019, 10, 30), ) # Convert search result to dataframe df = search_result.to_df()","title":"1 China VLCC Discharges"},{"location":"examples/2_crude_from_saudi_arabia_to_india/","text":"Let's find all crude cargo movements from Saudi Arabia to India that loaded in the last month. The below script returns a pd.DataFrame , similar to the table given in the movements tab of https://analytics.vortexa.com , filtering on Products: Crude with Origin: Saudi Arabia , Destination: India and Date Range: Departures in the last Month . from datetime import datetime from dateutil.relativedelta import relativedelta from vortexasdk import CargoMovements, Geographies, Products if __name__ == \"__main__\": now = datetime.utcnow() one_month_ago = now - relativedelta(months=1) # For this analysis we need the geography ID for India, and the geography ID for Saudi Arabia. We're going to # show 2 ways to retrieve geography IDs. You'll want to chose method 1 or 2 depending on your use case. # Option 1. We look up a geography with an exact matching name saudi_arabia = ( Geographies() .search(\"Saudi Arabia\", exact_term_match=True) .to_list()[0] .id ) # Option 2. We search for geographies with similar names, then pick the one we're looking for # First we find the ID for the country India. Note that when searching geographies with the term 'india', we'll # retrieve all geographies with india in the name, ie Indiana, British Indian Ocean Territory... all_geogs_with_india_in_the_name = Geographies().search(\"india\").to_list() # If running interactively, you may want to print all the names here to inspect them for yourself for g in all_geogs_with_india_in_the_name: print(g.name) # We're only interested in the country India here india = [ g.id for g in all_geogs_with_india_in_the_name if g.name == \"India\" ] # Check we've only got one ID for India assert len(india) == 1 # Let's find the Crude ID, # here we know the exact name of the product we're looking for so we set exact_term_match=True crude = Products().search(\"Crude\", exact_term_match=True).to_list()[0].id # Query the API. search_result = CargoMovements().search( filter_activity=\"loading_end\", filter_origins=saudi_arabia, filter_destinations=india, filter_products=crude, filter_time_min=one_month_ago, filter_time_max=now, ) # A complete list of available columns can be found at https://vortechsa.github.io/python-sdk/endpoints/cargo_movements/#notes # We only require a subset of available columns here required_columns = [ # A cargo movement can be carried by multiple vessels across various STS transfers. 
You can find all the vessels that # the cargo was onboard by inspecting the 'vessels.0', 'vessels.1' columns etc. # The 'vessels.0' columns shows the primary vessel associated with the cargo movement \"vessels.0.name\", \"vessels.0.vessel_class\", # Here we show any corporate information associated with the primary vessel \"vessels.0.corporate_entities.charterer.label\", \"vessels.0.corporate_entities.time_charterer.label\", \"vessels.0.corporate_entities.effective_controller.label\", # Show the product information and quantity \"product.group.label\", \"product.grade.label\", \"quantity\", # Is the vessel in transit, has it already discharged, or is it in floating storage? \"status\", # Show the loading Port name, and the loading timestamp \"events.cargo_port_load_event.0.location.port.label\", \"events.cargo_port_load_event.0.end_timestamp\", # Show the discharge Port name, and the discharge timestamp \"events.cargo_port_unload_event.0.location.port.label\", \"events.cargo_port_unload_event.0.end_timestamp\", ] # Convert the search result to a dataframe df = search_result.to_df(columns=required_columns) # Sort the dataframe by loading timestamp df = df.sort_values(by=[\"events.cargo_port_load_event.0.end_timestamp\"])","title":"2 crude from saudi arabia to india"},{"location":"examples/3_chinese_daily_imports/","text":"Let's retrieve the daily sum of Chinese Crude/Condensate imports, across January 2019. The below script returns: key value count 0 2019-01-01T00:00:00.000Z 1237381 9 1 2019-01-02T00:00:00.000Z 6548127 23 2 2019-01-03T00:00:00.000Z 45457617 23 3 2019-01-04T00:00:00.000Z 6467759 43 4 2019-01-05T00:00:00.000Z 7777144 4 ... from datetime import datetime from vortexasdk import CargoTimeSeries, Geographies, Products if __name__ == \"__main__\": # Find china ID, here we're only looking for geographies with the exact name China, so we set exact_term_match=True china = Geographies().search(term=\"China\", exact_term_match=True).to_list()[0].id # Find Crude/Condensates ID. # Again, we know the exact name of the product we're searching for, so we set exact_term_match=True crude_condensates = Products().search(term=\"Crude/Condensates\", exact_term_match=True).to_list()[0].id # Query API search_result = CargoTimeSeries().search( # We're only interested in movements into China filter_destinations=china, # We're looking at daily imports timeseries_frequency=\"day\", # We want 'b' for barrels here timeseries_unit=\"b\", # We're only interested in Crude/Condensates filter_products=crude_condensates, # We want all cargo movements that unloaded in January 2019 to be included filter_activity=\"unloading_start\", filter_time_min=datetime(2019, 1, 1), filter_time_max=datetime(2019, 2, 1), ) # Convert search result to dataframe df = search_result.to_df()","title":"3 chinese daily imports"},{"location":"examples/4_medium_sour_floating_storage/","text":"Let's see how much Medium-Sour Crude is in long term floating storage, in January 2019. The below script returns: key value count 0 2019-01-01T00:00:00.000Z 7381 9 1 2019-01-02T00:00:00.000Z 8127 23 2 2019-01-03T00:00:00.000Z 2333 32 3 2019-01-04T00:00:00.000Z 447759 43 4 2019-01-05T00:00:00.000Z 7777144 4 ... 
from datetime import datetime from docs.utils import to_markdown from vortexasdk import CargoTimeSeries, Products if __name__ == \"__main__\": # Find Medium Sour ID medium_sour = [ p.id for p in Products().search(term=\"Medium-Sour\").to_list() if p.name == \"Medium-Sour\" ] # Check we've only got one ID assert len(medium_sour) == 1 # Query API search_result = CargoTimeSeries().search( # We're looking at daily storage levels timeseries_frequency=\"day\", # We want 'b' for barrels here timeseries_unit=\"b\", # We're only interested in storage of Medium-Sour Crude filter_products=medium_sour, # We're only included in cargo's that were in floating storage filter_activity=\"storing_state\", # We're only interested in floating storage that lasted longer than 14 days timeseries_activity_time_span_min=1000 * 60 * 60 * 24 * 14, # Let's limit the search to January 2019 storage events filter_time_min=datetime(2019, 1, 1), filter_time_max=datetime(2019, 2, 1), ) # Convert search result to dataframe df = search_result.to_df() print(to_markdown(df.head()))","title":"4 medium sour floating storage"},{"location":"examples/jupyter_notebooks/","text":"Example Jupyter Notebooks The examples section of the vortexasdk GitHub repository, hosts a variety of Jupyter Notebooks that illustrate some real-life use-cases of how the SDK can be used to answer interesting industry questions, identify emerging patterns & trends and assist forecasting models. Those notebooks include: New to Python - Crude and Condensates in Floating Storage US Crude Exports Exploration & Forecasting China Oil Flows during the Covid-19 Outbreak Crude Floating Storage and its relation to Prices Financial services use cases notebook","title":"Jupyter Notebooks"},{"location":"examples/jupyter_notebooks/#example-jupyter-notebooks","text":"The examples section of the vortexasdk GitHub repository, hosts a variety of Jupyter Notebooks that illustrate some real-life use-cases of how the SDK can be used to answer interesting industry questions, identify emerging patterns & trends and assist forecasting models. Those notebooks include: New to Python - Crude and Condensates in Floating Storage US Crude Exports Exploration & Forecasting China Oil Flows during the Covid-19 Outbreak Crude Floating Storage and its relation to Prices Financial services use cases notebook","title":"Example Jupyter Notebooks"}]} \ No newline at end of file +{"config":{"indexing":"full","lang":["en"],"min_search_length":3,"prebuild_index":false,"separator":"[\\s\\-]+"},"docs":[{"location":"","text":"VortexaSDK Welcome to Vortexa's Python Software Development Kit (SDK)! We built the SDK to provide fast, interactive, programmatic exploration of our data. The tool lets Data Scientists, Analysts and Developers efficiently explore the world\u2019s waterborne oil movements, and to build custom models & reports with minimum setup cost. The SDK sits as a thin python wrapper around Vortexa's API , giving you immediate access to pandas DataFrames. 
Example In an interactive Python console, run: >>> from datetime import datetime >>> from vortexasdk import CargoMovements >>> df = CargoMovements()\\ .search(filter_activity='loading_state', filter_time_min=datetime(2017, 8, 2), filter_time_max=datetime(2017, 8, 3))\\ .to_df() returns: quantity vessels.0.name product.group.label product.grade.label events.cargo_port_load_event.0.end_timestamp events.cargo_port_unload_event.0.start_timestamp 0 1998 ALSIA SWAN Clean products Lube Oils 2017-08-01T06:10:45+0000 2017-08-27T14:38:15+0000 1 16559 IVER Dirty products nan 2017-08-02T17:20:51+0000 2017-09-07T07:52:20+0000 2 522288 BLUE SUN Crude Gharib 2017-08-02T04:22:09+0000 2017-08-13T10:32:09+0000 Quick Start Try me out in your browser: Installation $ pip install vortexasdk The SDK requires Python versions between 3.7 and 3.10. See Setup FAQ for more details. To install the SDK on an Apple ARM-based machine, use Python versions between 3.7 and 3.10 and use the latest version of pip. This is supported in the SDK versions 0.41.0 or higher. Authentication Set your VORTEXA_API_KEY environment variable, that's all. Alternatively, the SDK prompts to you enter your API Key when running a script interactively. To get an API key and experiment with Vortexa's data, you can request a demo here . Check Setup To check the SDK is setup correctly, run the following in a bash console: $ python -m vortexasdk.check_setup A successful setup looks like this: Next Steps Learn how to call Endpoints Glossary The Glossary can be found at Vortexa API Documentation . The Glossary outlines key terms, functions and assumptions aimed at helping to extract powerful findings from our data. Documentation Read the documentation at VortexaSDK Docs Contributing We welcome contributions! Please read our Contributing Guide for ways to offer feedback and contributions. Thanks goes to these wonderful contributors ( emoji key ): Kit Burgess \ud83c\udfa8 \ud83d\udcbb tinovs \ud83d\udcbb \ud83d\udc40 David Andrew Starkey \ud83d\udcbb \ud83d\udcd6 \ud83d\udca1 syed \ud83d\udc40 Jakub Korzeniowski \ud83e\udd14 Edward Wright \ud83d\udcd3 Patrick Roddy \ud83d\udcd3 Romain \ud83d\udcd3 \ud83e\udd14 Natday \ud83d\udcbc \ud83e\udd14 \ud83d\udcd3 ArthurD1 \ud83d\udcd3 Chloe Connor \ud83d\udcd3 Achilleas Sfakianakis \ud83d\udcd3 Sean Barry \ud83d\udcbb \ud83d\udcd6 Konrad Moskal \ud83d\udcbb Pawel Pietruszka \ud83d\udcbb This project follows the all-contributors specification. Contributions of any kind welcome!","title":"Home"},{"location":"#vortexasdk","text":"Welcome to Vortexa's Python Software Development Kit (SDK)! We built the SDK to provide fast, interactive, programmatic exploration of our data. The tool lets Data Scientists, Analysts and Developers efficiently explore the world\u2019s waterborne oil movements, and to build custom models & reports with minimum setup cost. 
The SDK sits as a thin python wrapper around Vortexa's API , giving you immediate access to pandas DataFrames.","title":"VortexaSDK"},{"location":"#example","text":"In an interactive Python console, run: >>> from datetime import datetime >>> from vortexasdk import CargoMovements >>> df = CargoMovements()\\ .search(filter_activity='loading_state', filter_time_min=datetime(2017, 8, 2), filter_time_max=datetime(2017, 8, 3))\\ .to_df() returns: quantity vessels.0.name product.group.label product.grade.label events.cargo_port_load_event.0.end_timestamp events.cargo_port_unload_event.0.start_timestamp 0 1998 ALSIA SWAN Clean products Lube Oils 2017-08-01T06:10:45+0000 2017-08-27T14:38:15+0000 1 16559 IVER Dirty products nan 2017-08-02T17:20:51+0000 2017-09-07T07:52:20+0000 2 522288 BLUE SUN Crude Gharib 2017-08-02T04:22:09+0000 2017-08-13T10:32:09+0000","title":"Example"},{"location":"#quick-start","text":"Try me out in your browser:","title":"Quick Start"},{"location":"#installation","text":"$ pip install vortexasdk The SDK requires Python versions between 3.7 and 3.10. See Setup FAQ for more details. To install the SDK on an Apple ARM-based machine, use Python versions between 3.7 and 3.10 and use the latest version of pip. This is supported in the SDK versions 0.41.0 or higher.","title":"Installation"},{"location":"#authentication","text":"Set your VORTEXA_API_KEY environment variable, that's all. Alternatively, the SDK prompts to you enter your API Key when running a script interactively. To get an API key and experiment with Vortexa's data, you can request a demo here .","title":"Authentication"},{"location":"#check-setup","text":"To check the SDK is setup correctly, run the following in a bash console: $ python -m vortexasdk.check_setup A successful setup looks like this:","title":"Check Setup"},{"location":"#next-steps","text":"Learn how to call Endpoints","title":"Next Steps"},{"location":"#glossary","text":"The Glossary can be found at Vortexa API Documentation . The Glossary outlines key terms, functions and assumptions aimed at helping to extract powerful findings from our data.","title":"Glossary"},{"location":"#documentation","text":"Read the documentation at VortexaSDK Docs","title":"Documentation"},{"location":"#contributing","text":"We welcome contributions! Please read our Contributing Guide for ways to offer feedback and contributions. Thanks goes to these wonderful contributors ( emoji key ): Kit Burgess \ud83c\udfa8 \ud83d\udcbb tinovs \ud83d\udcbb \ud83d\udc40 David Andrew Starkey \ud83d\udcbb \ud83d\udcd6 \ud83d\udca1 syed \ud83d\udc40 Jakub Korzeniowski \ud83e\udd14 Edward Wright \ud83d\udcd3 Patrick Roddy \ud83d\udcd3 Romain \ud83d\udcd3 \ud83e\udd14 Natday \ud83d\udcbc \ud83e\udd14 \ud83d\udcd3 ArthurD1 \ud83d\udcd3 Chloe Connor \ud83d\udcd3 Achilleas Sfakianakis \ud83d\udcd3 Sean Barry \ud83d\udcbb \ud83d\udcd6 Konrad Moskal \ud83d\udcbb Pawel Pietruszka \ud83d\udcbb This project follows the all-contributors specification. Contributions of any kind welcome!","title":"Contributing"},{"location":"config/","text":"Config The VortexaSDK can be configured using environment variables. Environment Variable Default Description VORTEXA_API_KEY none API Key used to access the VortexaAPI. Refer to Vortexa API Authentication for more details, including instructions on where to find your API key. 
LOG_FILE none Output log file LOG_LEVEL INFO Configure the level of must be one of [\"DEBUG\", \"INFO\", \"WARNING\", \"ERROR\", \"CRITICAL\"] HTTP_PROXY none Send API requests via a corporate http proxy. This environment variable is used by the requests library, see here for further details HTTPS_PROXY none Send API requests via a corporate https proxy. This environment variable is used by the requests library, see here for further details","title":"Config"},{"location":"config/#config","text":"The VortexaSDK can be configured using environment variables. Environment Variable Default Description VORTEXA_API_KEY none API Key used to access the VortexaAPI. Refer to Vortexa API Authentication for more details, including instructions on where to find your API key. LOG_FILE none Output log file LOG_LEVEL INFO Configure the level of must be one of [\"DEBUG\", \"INFO\", \"WARNING\", \"ERROR\", \"CRITICAL\"] HTTP_PROXY none Send API requests via a corporate http proxy. This environment variable is used by the requests library, see here for further details HTTPS_PROXY none Send API requests via a corporate https proxy. This environment variable is used by the requests library, see here for further details","title":"Config"},{"location":"faq/","text":"Why do my requests hang or run in an infinite loop or I get a RuntimeError (\"An attempt has been made to start a new process...\")? On Windows, it may be that your script doesn't check if __name__ == \"__main__\": before calling the SDK. Your script should contain this check, just like in this example For more details on why if __name__ == \"__main__\": is required, check out those interesting stack overflow posts here and here on Windows multiprocessing. How do I use the SDK with a corporate proxy? To send SDK requests via a proxy, you can set the HTTP_PROXY or HTTPS_PROXY environment variables. More detail is given in the requests library docs here What's the difference between a cargo movement and a vessel movement? A cargo movement is defined as the complete journey of a quantity of oil from its origin terminal to its destination terminal, including all ship to ship (STS) transfers in-between. For example: Tanker X loads 1mn bl of crude from Houston and discharges onto another tanker Y offshore the US Gulf, which then discharges in Singapore. The cargo movement is for 1mn bl of crude oil from Houston to Singapore. The vessel movement for tanker X is Houston to US Gulf, while for tanker Y it is US Gulf to Singapore. When there is no STS transfer, a cargo movement and vessel movement is equivalent. A more detailed explanation can be found here Where can I find a list of products? Check out the Vortexa Glossary, which can be downloaded from here What's the difference in a trading region and a geographic region? Trading regions have been designed by Vortexa to try and group terminals, ports and countries around oil market conventions (e.g. northwest Europe, west Africa, etc) whereas geographic regions are much wider in scope (e.g. North America, Asia, Africa). Trading regions are more granular than geographic regions.","title":"General"},{"location":"faq/#why-do-my-requests-hang-or-run-in-an-infinite-loop-or-i-get-a-runtimeerror-an-attempt-has-been-made-to-start-a-new-process","text":"On Windows, it may be that your script doesn't check if __name__ == \"__main__\": before calling the SDK. 
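A minimal sketch of the script layout meant here (the filter values are illustrative only): from datetime import datetime from vortexasdk import CargoMovements if __name__ == \"__main__\": # Keep all SDK calls under this guard so that worker processes re-importing the script on Windows do not run the search again df = CargoMovements().search( filter_activity=\"loading_state\", filter_time_min=datetime(2019, 8, 1), filter_time_max=datetime(2019, 8, 2), ).to_df() print(df.head()) 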
Your script should contain this check, just like in this example For more details on why if __name__ == \"__main__\": is required, check out those interesting stack overflow posts here and here on Windows multiprocessing.","title":"Why do my requests hang or run in an infinite loop or I get a RuntimeError (\"An attempt has been made to start a new process...\")?"},{"location":"faq/#how-do-i-use-the-sdk-with-a-corporate-proxy","text":"To send SDK requests via a proxy, you can set the HTTP_PROXY or HTTPS_PROXY environment variables. More detail is given in the requests library docs here","title":"How do I use the SDK with a corporate proxy?"},{"location":"faq/#whats-the-difference-between-a-cargo-movement-and-a-vessel-movement","text":"A cargo movement is defined as the complete journey of a quantity of oil from its origin terminal to its destination terminal, including all ship to ship (STS) transfers in-between. For example: Tanker X loads 1mn bl of crude from Houston and discharges onto another tanker Y offshore the US Gulf, which then discharges in Singapore. The cargo movement is for 1mn bl of crude oil from Houston to Singapore. The vessel movement for tanker X is Houston to US Gulf, while for tanker Y it is US Gulf to Singapore. When there is no STS transfer, a cargo movement and vessel movement is equivalent. A more detailed explanation can be found here","title":"What's the difference between a cargo movement and a vessel movement?"},{"location":"faq/#where-can-i-find-a-list-of-products","text":"Check out the Vortexa Glossary, which can be downloaded from here","title":"Where can I find a list of products?"},{"location":"faq/#whats-the-difference-in-a-trading-region-and-a-geographic-region","text":"Trading regions have been designed by Vortexa to try and group terminals, ports and countries around oil market conventions (e.g. northwest Europe, west Africa, etc) whereas geographic regions are much wider in scope (e.g. North America, Asia, Africa). Trading regions are more granular than geographic regions.","title":"What's the difference in a trading region and a geographic region?"},{"location":"faq_setup/","text":"How to install Python on Windows using Anaconda Download the Python3.7 Graphical installer from the anaconda website Follow the conda installation instructions How do I install the SDK on Windows? First, open up an Anaconda Prompt. Hit the start button and type anaconda prompt. Use pip to install the sdk Run pip install --user vortexasdk in the anaconda command prompt You're done! The VortexaSDK has now been installed. How do I install the SDK on Mac / Linux? Type the following into a bash terminal $ pip install vortexasdk How do I add an environment variable on Windows? Hit the windows key, then type \"environment\" to open up a control panel settings page titled \"Edit the system environment variables\" In the System Properties window, click on the Advanced tab, then click the Environment Variables button near the bottom of that tab. Add a new user variable Where is my API Key? Refer to Vortexa API Authentication for details, including instructions on where to find your API key. How do I request an API Key? You can request a demo here . More details are given in docs.vortexa.com . Alternatively, please get in touch at www.vortexa.com . How can I check the SDK is setup correctly? 
Run the following in a bash console on Mac/Linux, or command prompt on Windows: $ python -m vortexasdk.check_setup A successful setup looks like this: On Windows, you'll need to paste the API key by right-clicking the console menu, like so: Hitting Ctrl+V won't paste the API key; this is due to a known Python Windows bug","title":"Setup"},{"location":"faq_setup/#how-to-install-python-on-windows-using-anaconda","text":"Download the Python 3.7 Graphical installer from the anaconda website Follow the conda installation instructions","title":"How to install Python on Windows using Anaconda"},{"location":"faq_setup/#how-do-i-install-the-sdk-on-windows","text":"First, open up an Anaconda Prompt. Hit the start button and type anaconda prompt. Use pip to install the sdk: Run pip install --user vortexasdk in the anaconda command prompt You're done! The VortexaSDK has now been installed.","title":"How do I install the SDK on Windows?"},{"location":"faq_setup/#how-do-i-install-the-sdk-on-mac-linux","text":"Type the following into a bash terminal $ pip install vortexasdk","title":"How do I install the SDK on Mac / Linux?"},{"location":"faq_setup/#how-do-i-add-an-environment-variable-on-windows","text":"Hit the Windows key, then type \"environment\" to open up a control panel settings page titled \"Edit the system environment variables\" In the System Properties window, click on the Advanced tab, then click the Environment Variables button near the bottom of that tab. Add a new user variable","title":"How do I add an environment variable on Windows?"},{"location":"faq_setup/#where-is-my-api-key","text":"Refer to Vortexa API Authentication for details, including instructions on where to find your API key.","title":"Where is my API Key?"},{"location":"faq_setup/#how-do-i-request-an-api-key","text":"You can request a demo here . More details are given in docs.vortexa.com . Alternatively, please get in touch at www.vortexa.com .","title":"How do I request an API Key?"},{"location":"faq_setup/#how-can-i-check-the-sdk-is-setup-correctly","text":"Run the following in a bash console on Mac/Linux, or command prompt on Windows: $ python -m vortexasdk.check_setup A successful setup looks like this: On Windows, you'll need to paste the API key by right-clicking the console menu, like so: Hitting Ctrl+V won't paste the API key; this is due to a known Python Windows bug","title":"How can I check the SDK is set up correctly?"},{"location":"config/config/","text":"Config The VortexaSDK can be configured using environment variables. Environment Variable Default Description VORTEXA_API_KEY none API Key used to access the VortexaAPI. Refer to Vortexa API Authentication for more details, including instructions on where to find your API key. LOG_FILE none Output log file LOG_LEVEL INFO Configure the logging level; must be one of [\"DEBUG\", \"INFO\", \"WARNING\", \"ERROR\", \"CRITICAL\"] HTTP_PROXY none Send API requests via a corporate http proxy. This environment variable is used by the requests library, see here for further details HTTPS_PROXY none Send API requests via a corporate https proxy. This environment variable is used by the requests library, see here for further details","title":"Config"},{"location":"config/config/#config","text":"Config The VortexaSDK can be configured using environment variables. Environment Variable Default Description VORTEXA_API_KEY none API Key used to access the VortexaAPI. Refer to Vortexa API Authentication for more details, including instructions on where to find your API key.
LOG_FILE none Output log file LOG_LEVEL INFO Configure the logging level; must be one of [\"DEBUG\", \"INFO\", \"WARNING\", \"ERROR\", \"CRITICAL\"] HTTP_PROXY none Send API requests via a corporate http proxy. This environment variable is used by the requests library, see here for further details HTTPS_PROXY none Send API requests via a corporate https proxy. This environment variable is used by the requests library, see here for further details","title":"Config"},{"location":"endpoints/about-endpoints/","text":"About VortexaSDK Endpoints The endpoints module allows you to query Vortexa's data. The VortexaSDK currently contains the following endpoints: Cargo Movements Voyages Charterers Geographies Products Vessels Cargo Time Series EIA Forecasts Tonne-miles Vessel Availability Crude Onshore Inventories Freight Pricing Each endpoint offers either one, or both, of two different functionalities: Lookup by ID . Retrieve an object matching a certain id. In SQL speak this is the equivalent of SELECT * FROM vessels WHERE id = 12345; Search . Retrieve a number of objects matching given search parameters. In SQL speak this is the equivalent of SELECT * FROM vessels WHERE name ~* 'ocean' AND vessel_class = 'vlcc'; Let's explain with some examples: Find all aframax vessels from vortexasdk import Vessels df = Vessels().search(vessel_classes='aframax').to_df() Find the vessel with id 12345 vessel = Vessels().reference(id='12345')","title":"About Endpoints"},{"location":"endpoints/about-endpoints/#about-vortexasdk-endpoints","text":"The endpoints module allows you to query Vortexa's data. The VortexaSDK currently contains the following endpoints: Cargo Movements Voyages Charterers Geographies Products Vessels Cargo Time Series EIA Forecasts Tonne-miles Vessel Availability Crude Onshore Inventories Freight Pricing Each endpoint offers either one, or both, of two different functionalities: Lookup by ID . Retrieve an object matching a certain id. In SQL speak this is the equivalent of SELECT * FROM vessels WHERE id = 12345; Search . Retrieve a number of objects matching given search parameters. In SQL speak this is the equivalent of SELECT * FROM vessels WHERE name ~* 'ocean' AND vessel_class = 'vlcc'; Let's explain with some examples: Find all aframax vessels from vortexasdk import Vessels df = Vessels().search(vessel_classes='aframax').to_df() Find the vessel with id 12345 vessel = Vessels().reference(id='12345')","title":"About VortexaSDK Endpoints"},{"location":"endpoints/asset_tanks/","text":"vortexasdk.endpoints.asset_tanks Try me out in your browser: AssetTanks AssetTanks(self) Asset Tanks endpoint. An Asset Tank is a reference value that corresponds to an ID associated with other entities. For example, an Asset Tank object may have the following keys: { \"name\": \"AAM001\", \"storage_type\": \"tbd\", \"crude_confidence\": \"confirmed\" ... } IDs represent asset tanks which can be found via the Asset Tank reference endpoint. When the asset tanks endpoint is searched with those ids as parameters: >>> from vortexasdk import AssetTanks >>> df = AssetTanks().search(ids=[\"6114b93026e61993797db33a46a5d2acbeacdbd63238a4271efaeafcee94b1d2\"]).to_df() Returns id capacity_bbl crude_confidence location_id name storage_type lat lon 0 6114b93026e61993797d... 645201 confirmed b839dc5fee39ff7efd5e1cf2494... AAM001 tbd 90 180 load_all AssetTanks.load_all(self) -> vortexasdk.endpoints.asset_tanks_result.AssetTankResult Load all asset tanks.
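For example, a minimal sketch (assuming a valid VORTEXA_API_KEY is already configured) that pulls the full asset tank reference data into a DataFrame:
>>> from vortexasdk import AssetTanks
>>> # Load every asset tank, then convert using the default columns documented below
>>> all_tanks_df = AssetTanks().load_all().to_df()
>>> all_tanks_df.head()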
search AssetTanks.search(self, ids: Union[str, List[str]] = None, corporate_entity_ids: Union[str, List[str]] = None, crude_confidence: List[str] = None, location_ids: Union[str, List[str]] = None, storage_type: List[str] = None, term: Union[str, List[str]] = None) -> vortexasdk.endpoints.asset_tanks_result.AssetTankResult Find all asset tanks matching the given search parameters. Arguments ids : An array of unique Asset Tank ID(s) to filter on. corporate_entity_ids : An array of owner ID(s) to filter on. crude_confidence : An array of confidence metrics to filter on. Possible values are: 'confirmed', 'probable', 'unlikely' location_ids : An array of geography ID(s) to filter on. storage_type : An array of storage types to filter on. Possible values are: 'refinery', 'non-refinery', 'commercial', 'spr', 'tbd' Returns List of asset tanks matching the given search parameters Examples Find all asset tanks with a storage_type of refinery . >>> from vortexasdk import AssetTanks >>> df = AssetTanks().search(storage_type=[\"refinery\"]).to_df() Returns id capacity_bbl crude_confidence location_id name storage_type lat lon 0 0a736a1816c0fea49a88... 104815 probable f726416f49adcac6d5d296c49a00... HOM009 refinery -60 24 1 b96adfb025a719b66927... 139279 unlikely f726416f49adcac6d5d296c49a00... HOM022 refinery 100 -90 vortexasdk.endpoints.asset_tanks_result AssetTankResult AssetTankResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the Asset Tanks endpoint. to_list AssetTankResult.to_list(self) -> List[vortexasdk.api.asset_tank.AssetTank] Represent asset tanks as a list. to_df AssetTankResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent asset tanks as a pd.DataFrame . Arguments columns : The asset tanks features we want in the dataframe. Enter columns='all' to include all features. Defaults to columns = ['id', 'capacity_bbl', 'crude_confidence', 'location_id', 'name', 'storage_type', 'lat', 'lon'] . Returns pd.DataFrame of asset tanks.","title":"Asset Tanks"},{"location":"endpoints/attributes/","text":"vortexasdk.endpoints.attributes Try me out in your browser: Attributes Attributes(self) Attributes endpoint. An Attribute is a reference value that corresponds to an ID associated with other entities. For example, a vessel object from the Vessel reference endpoint may have the following keys: { \"ice_class\": \"b09ed4e2bd6904dd\", \"propulsion\": \"3ace0e050724707b\" } These IDs represent attributes which can be found via the Attributes reference endpoint. When the attributes endpoint is searched with those ids as parameters: >>> from vortexasdk import Attributes >>> df = Attributes().search(ids=[\"b09ed4e2bd6904dd\", \"3ace0e050724707b\"]).to_df() Returns id type label 0 b09ed4e2bd6904dd ice_class UNKNOWN 1 3ace0e050724707b propulsion DFDE load_all Attributes.load_all(self) -> vortexasdk.endpoints.attributes_result.AttributeResult Load all attributes. search Attributes.search(self, type: str = None, term: Union[str, List[str]] = None, ids: Union[str, List[str]] = None) -> vortexasdk.endpoints.attributes_result.AttributeResult Find all attributes matching the given type. Arguments type : The type of attribute we're filtering on. Type can be: ice_class , propulsion , scrubber Returns List of attributes matching the given type Examples Find all attributes with a type of scrubber .
>>> from vortexasdk import Attributes >>> df = Attributes().search(type=\"scrubber\").to_df() returns id name type 0 14c7b073809eb565 Open Loop scrubber 1 478fca39000c49d6 Unknown scrubber vortexasdk.endpoints.attributes_result AttributeResult AttributeResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the Attributes endpoint. to_list AttributeResult.to_list(self) -> List[vortexasdk.api.attribute.Attribute] Represent attributes as a list. to_df AttributeResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent attributes as a pd.DataFrame . Arguments columns : The attributes features we want in the dataframe. Enter columns='all' to include all features. Defaults to columns = ['id', 'name', 'type'] . Returns pd.DataFrame of attributes.","title":"Attributes"},{"location":"endpoints/cargo_movements/","text":"vortexasdk.endpoints.cargo_movements Try me out in your browser: CargoMovements CargoMovements(self) Cargo Movements Endpoint, use this to search through Vortexa's cargo movements. A detailed explanation of Cargo/Vessel Movements can be found here . search CargoMovements.search(self, filter_activity: str = None, filter_time_min: datetime.datetime = datetime.datetime(2023, 9, 11, 11, 19, 18, 998274), filter_time_max: datetime.datetime = datetime.datetime(2023, 9, 11, 11, 19, 18, 998277), cm_unit: str = 'b', filter_charterers: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_products: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_storage_locations: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_flags: Union[str, List[str]] = None, filter_vessel_ice_class: Union[str, List[str]] = None, filter_vessel_propulsion: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, exclude_vessel_ice_class: Union[str, List[str]] = None, exclude_vessel_propulsion: Union[str, List[str]] = None, disable_geographic_exclusion_rules: bool = None) -> vortexasdk.endpoints.cargo_movements_result.CargoMovementsResult Find CargoMovements matching the given search parameters. Arguments filter_activity : Movement activity on which to base the time filter. Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'unloaded_state', 'storing_state', 'storing_start', 'storing_end', 'transiting_state', 'any_activity', 'oil_on_water_state']. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. cm_unit : Unit of measurement. Enter 'b' for barrels or 't' for tonnes. 
filter_charterers : A charterer ID, or list of charterer IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_storage_locations : A geography ID, or list of geography IDs to filter on. filter_ship_to_ship_locations : A geography ID, or list of geography IDs to filter on. filter_waypoints : A geography ID, or list of geography IDs to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_scrubbers : Either inactive ('disabled'), included ('inc'), or excluded ('exc'). filter_vessel_flags : A vessel flag, or list of vessel flags to filter on. filter_vessel_ice_class : An attribute ID, or list of attribute IDs to filter on. filter_vessel_propulsion : An attribute ID, or list of attribute IDs to filter on. exclude_origins : A geography ID, or list of geography IDs to exclude. exclude_destinations : A geography ID, or list of geography IDs to exclude. exclude_products : A product ID, or list of product IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_charterers : A charterer ID, or list of charterer IDs to exclude. exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_flags : A vessel flag, or list of vessel flags to exclude. exclude_vessel_ice_class : An attribute ID, or list of attribute IDs to exclude. exclude_vessel_propulsion : An attribute ID, or list of attribute IDs to exclude. disable_geographic_exclusion_rules : This relates to the popular industry term \"intra-movements\" and determines the filter behaviour for cargo leaving and then re-entering the same geographic area. Returns CargoMovementsResult , containing all the cargo movements matching the given search terms. Example Which cargoes were loaded from Rotterdam on the morning of 1st December 2018? >>> from vortexasdk import CargoMovements, Geographies >>> rotterdam = [g.id for g in Geographies().search(\"rotterdam\").to_list() if \"port\" in g.layer] >>> search_result = CargoMovements().search( ... filter_origins=rotterdam, ... filter_activity='loading_state', ... filter_time_min=datetime(2018, 12, 1), ... filter_time_max=datetime(2018, 12, 1, 12)) >>> df = search_result.to_df(columns=['product.grade.label', 'product.group.label', 'vessels.0.vessel_class']) product.group.label product.grade.label vessels.0.vessel_class 0 Clean products Pygas general_purpose 1 Clean products Chemicals tiny_tanker 2 Clean products Chemicals tiny_tanker 3 Dirty products Low Sulphur VGO (LSVGO) general_purpose 4 Clean products ULSD (Ultra Low Sulphur Diesel) general_purpose 5 Clean products Chemicals tiny_tanker 6 Clean products Finished Gasoline handymax Which VLCC cargoes passed through the Suez Canal en route to China? Note here we include vessels.0..., vessels.1..., vessels.2... columns. This lets us view all vessels present in any STS operations.
>>> from vortexasdk import CargoMovements, Geographies, Vessels >>> suez = [g.id for g in Geographies().search(\"suez\").to_list()] >>> china = [g.id for g in Geographies().search(\"china\").to_list() if \"country\" in g.layer] >>> vlccs = [v.id for v in Vessels().search(vessel_classes=\"vlcc_plus\").to_list()] >>> cargo_movement_search_result = CargoMovements().search( ... filter_destinations=china, ... filter_activity=\"loading_state\", ... filter_waypoints=suez, ... filter_vessels=vlccs, ... filter_time_min=datetime(2018, 12, 1), ... filter_time_max=datetime(2018, 12, 1)) >>> cols = ['vessels.0.name', 'vessels.0.vessel_class', 'vessels.1.name', 'vessels.1.vessel_class', 'vessels.2.name', 'vessels.2.vessel_class', 'product.group.label', 'quantity'] >>> cargo_movements_df = cargo_movement_search_result.to_df(columns=cols) vessels.0.name vessels.0.vessel_class vessels.1.name vessels.1.vessel_class vessels.2.name vessels.2.vessel_class product.group.label quantity 0 MINERVA MARINA suezmax COSGLORY LAKE vlcc_plus nan nan Crude 700614 1 BUKHA vlcc_plus nan nan nan nan Crude 1896374 2 ATHENIAN FREEDOM vlcc_plus nan nan nan nan Crude 183537 3 ATINA suezmax DONAT suezmax DS VISION vlcc_plus Crude 896773 4 MINERVA MARINA suezmax COSGLORY LAKE vlcc_plus nan nan Crude 405724 5 MASAL suezmax EKTA vlcc_plus nan nan Crude 997896 6 ATHENIAN FREEDOM vlcc_plus nan nan nan nan Crude 120812 Cargo Movements Endpoint Further Documentation record CargoMovements.record(self, id: str, params: Dict = {}) -> Dict Perform a cargo movement lookup. Arguments id : Cargo movement ID to look up (long_id or short_id) params : Supported search params: 'unit' : enter 'b' for barrels, 't' for tonnes and 'cbm' for cubic meters Returns Cargo movement record matching the ID Further Documentation: VortexaAPI Cargo Movement vortexasdk.endpoints.cargo_movements_result CargoMovementsResult CargoMovementsResult(__pydantic_self__, **data: Any) -> None Container class holding search results returned from the cargo movements endpoint. This class has two methods, to_list() and to_df() , allowing search results to be represented as a list of CargoMovements , or as a pd.DataFrame , respectively. to_list CargoMovementsResult.to_list(self) -> List[vortexasdk.api.cargo_movement.CargoMovement] Represent cargo movements as a list of CargoMovementEntity s. to_df CargoMovementsResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent cargo movements as a pd.DataFrame . Arguments columns : Output columns present in the pd.DataFrame . Enter columns='all' to return all available columns. Enter columns=None to use cargo_movements.DEFAULT_COLUMNS . Returns pd.DataFrame , one row per cargo movement. Notes A cargo movement is a complicated, nested structure. Between its point of loading and discharge, a cargo movement may be carried by N vessels, with N-1 associated STS events. Each of these N vessels could have an associated effective controller, charterer, time charterer... etc. In order to represent a cargo movement as a flat (not nested) record in a dataframe, the SDK flattens the cargo movement, generating many columns in the process. The columns are logically named. Let's say that a cargo is transferred between 4 vessels en route from a load in Rotterdam to a discharge in New York. This is represented as 1 cargo_port_load_event , followed by 3 cargo_sts_event s, and finally 1 cargo_port_unload_event .
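As a rough, hypothetical sketch (the time window and column choice are illustrative, not taken from the examples above), individual flattened columns can be requested by name when converting a search result:
>>> from datetime import datetime
>>> from vortexasdk import CargoMovements
>>> # vessels.1.* columns are only populated when an STS transfer occurred
>>> df = CargoMovements().search(
...     filter_activity=\"loading_state\",
...     filter_time_min=datetime(2018, 12, 1),
...     filter_time_max=datetime(2018, 12, 1, 12),
... ).to_df(columns=[
...     \"vessels.0.name\",
...     \"vessels.1.imo\",
...     \"events.cargo_sts_event.0.location.country.label\",
...     \"events.cargo_port_load_event.0.start_timestamp\",
... ])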
In this four-vessel example, the name of the 1st vessel is found in the vessels.0.name column (we're using zero-based numbering indexes). Likewise, the imo of the second vessel is found in the vessels.1.imo column. To find the name of the country in which the second STS event occurred, we'd use the events.cargo_sts_event.1.location.country.layer column. Similarly, to find out when the first vessel started loading the cargo from Rotterdam, we'd use the events.cargo_port_load_event.0.start_timestamp column. By default, the columns returned are something along the lines of: DEFAULT_COLUMNS = [ 'events.cargo_port_load_event.0.location.port.label', 'events.cargo_port_unload_event.0.location.port.label', 'product.group.label', 'product.grade.label', 'quantity', 'vessels.0.name', 'events.cargo_port_load_event.0.end_timestamp', 'events.cargo_port_unload_event.0.start_timestamp', ] The exact default columns used can be found at cargo_movements.DEFAULT_COLUMNS A near-complete list of columns is given below [ 'cargo_movement_id', 'events.cargo_fso_load_event.0.end_timestamp', 'events.cargo_fso_load_event.0.event_type', 'events.cargo_fso_load_event.0.fso_vessel_id', 'events.cargo_fso_load_event.0.fso_vessel_name', 'events.cargo_fso_load_event.0.location.country.id', 'events.cargo_fso_load_event.0.location.country.label', 'events.cargo_fso_load_event.0.location.country.layer', 'events.cargo_fso_load_event.0.location.country.probability', 'events.cargo_fso_load_event.0.location.country.source', 'events.cargo_fso_load_event.0.location.region.id', 'events.cargo_fso_load_event.0.location.region.label', 'events.cargo_fso_load_event.0.location.region.layer', 'events.cargo_fso_load_event.0.location.region.probability', 'events.cargo_fso_load_event.0.location.region.source', 'events.cargo_fso_load_event.0.location.shipping_region.id', 'events.cargo_fso_load_event.0.location.shipping_region.label', 'events.cargo_fso_load_event.0.location.shipping_region.layer', 'events.cargo_fso_load_event.0.location.shipping_region.probability', 'events.cargo_fso_load_event.0.location.shipping_region.source', 'events.cargo_fso_load_event.0.location.sts_zone.id', 'events.cargo_fso_load_event.0.location.sts_zone.label', 'events.cargo_fso_load_event.0.location.sts_zone.layer', 'events.cargo_fso_load_event.0.location.sts_zone.probability', 'events.cargo_fso_load_event.0.location.sts_zone.source', 'events.cargo_fso_load_event.0.location.trading_block.id', 'events.cargo_fso_load_event.0.location.trading_block.label', 'events.cargo_fso_load_event.0.location.trading_block.layer', 'events.cargo_fso_load_event.0.location.trading_block.probability', 'events.cargo_fso_load_event.0.location.trading_block.source', 'events.cargo_fso_load_event.0.location.trading_region.id', 'events.cargo_fso_load_event.0.location.trading_region.label', 'events.cargo_fso_load_event.0.location.trading_region.layer', 'events.cargo_fso_load_event.0.location.trading_region.probability', 'events.cargo_fso_load_event.0.location.trading_region.source', 'events.cargo_fso_load_event.0.location.trading_subregion.id', 'events.cargo_fso_load_event.0.location.trading_subregion.label', 'events.cargo_fso_load_event.0.location.trading_subregion.layer', 'events.cargo_fso_load_event.0.location.trading_subregion.probability', 'events.cargo_fso_load_event.0.location.trading_subregion.source', 'events.cargo_fso_load_event.0.pos.0', 'events.cargo_fso_load_event.0.pos.1', 'events.cargo_fso_load_event.0.probability', 'events.cargo_fso_load_event.0.start_timestamp',
'events.cargo_fso_load_event.0.to_vessel_id', 'events.cargo_fso_load_event.0.to_vessel_name', 'events.cargo_fso_unload_event.0.end_timestamp', 'events.cargo_fso_unload_event.0.event_type', 'events.cargo_fso_unload_event.0.from_vessel_id', 'events.cargo_fso_unload_event.0.from_vessel_name', 'events.cargo_fso_unload_event.0.fso_vessel_id', 'events.cargo_fso_unload_event.0.fso_vessel_name', 'events.cargo_fso_unload_event.0.location.country.id', 'events.cargo_fso_unload_event.0.location.country.label', 'events.cargo_fso_unload_event.0.location.country.layer', 'events.cargo_fso_unload_event.0.location.country.probability', 'events.cargo_fso_unload_event.0.location.country.source', 'events.cargo_fso_unload_event.0.location.region.id', 'events.cargo_fso_unload_event.0.location.region.label', 'events.cargo_fso_unload_event.0.location.region.layer', 'events.cargo_fso_unload_event.0.location.region.probability', 'events.cargo_fso_unload_event.0.location.region.source', 'events.cargo_fso_unload_event.0.location.shipping_region.id', 'events.cargo_fso_unload_event.0.location.shipping_region.label', 'events.cargo_fso_unload_event.0.location.shipping_region.layer', 'events.cargo_fso_unload_event.0.location.shipping_region.probability', 'events.cargo_fso_unload_event.0.location.shipping_region.source', 'events.cargo_fso_unload_event.0.location.sts_zone.id', 'events.cargo_fso_unload_event.0.location.sts_zone.label', 'events.cargo_fso_unload_event.0.location.sts_zone.layer', 'events.cargo_fso_unload_event.0.location.sts_zone.probability', 'events.cargo_fso_unload_event.0.location.sts_zone.source', 'events.cargo_fso_unload_event.0.location.trading_block.id', 'events.cargo_fso_unload_event.0.location.trading_block.label', 'events.cargo_fso_unload_event.0.location.trading_block.layer', 'events.cargo_fso_unload_event.0.location.trading_block.probability', 'events.cargo_fso_unload_event.0.location.trading_block.source', 'events.cargo_fso_unload_event.0.location.trading_region.id', 'events.cargo_fso_unload_event.0.location.trading_region.label', 'events.cargo_fso_unload_event.0.location.trading_region.layer', 'events.cargo_fso_unload_event.0.location.trading_region.probability', 'events.cargo_fso_unload_event.0.location.trading_region.source', 'events.cargo_fso_unload_event.0.location.trading_subregion.id', 'events.cargo_fso_unload_event.0.location.trading_subregion.label', 'events.cargo_fso_unload_event.0.location.trading_subregion.layer', 'events.cargo_fso_unload_event.0.location.trading_subregion.probability', 'events.cargo_fso_unload_event.0.location.trading_subregion.source', 'events.cargo_fso_unload_event.0.pos.0', 'events.cargo_fso_unload_event.0.pos.1', 'events.cargo_fso_unload_event.0.probability', 'events.cargo_fso_unload_event.0.start_timestamp', 'events.cargo_port_load_event.0.end_timestamp', 'events.cargo_port_load_event.0.event_type', 'events.cargo_port_load_event.0.location.country.id', 'events.cargo_port_load_event.0.location.country.label', 'events.cargo_port_load_event.0.location.country.layer', 'events.cargo_port_load_event.0.location.country.probability', 'events.cargo_port_load_event.0.location.country.source', 'events.cargo_port_load_event.0.location.port.id', 'events.cargo_port_load_event.0.location.port.label', 'events.cargo_port_load_event.0.location.port.layer', 'events.cargo_port_load_event.0.location.port.probability', 'events.cargo_port_load_event.0.location.port.source', 'events.cargo_port_load_event.0.location.region.id', 'events.cargo_port_load_event.0.location.region.label', 
'events.cargo_port_load_event.0.location.region.layer', 'events.cargo_port_load_event.0.location.region.probability', 'events.cargo_port_load_event.0.location.region.source', 'events.cargo_port_load_event.0.location.shipping_region.id', 'events.cargo_port_load_event.0.location.shipping_region.label', 'events.cargo_port_load_event.0.location.shipping_region.layer', 'events.cargo_port_load_event.0.location.shipping_region.probability', 'events.cargo_port_load_event.0.location.shipping_region.source', 'events.cargo_port_load_event.0.location.terminal.id', 'events.cargo_port_load_event.0.location.terminal.label', 'events.cargo_port_load_event.0.location.terminal.layer', 'events.cargo_port_load_event.0.location.terminal.probability', 'events.cargo_port_load_event.0.location.terminal.source', 'events.cargo_port_load_event.0.location.trading_block.id', 'events.cargo_port_load_event.0.location.trading_block.label', 'events.cargo_port_load_event.0.location.trading_block.layer', 'events.cargo_port_load_event.0.location.trading_block.probability', 'events.cargo_port_load_event.0.location.trading_block.source', 'events.cargo_port_load_event.0.location.trading_region.id', 'events.cargo_port_load_event.0.location.trading_region.label', 'events.cargo_port_load_event.0.location.trading_region.layer', 'events.cargo_port_load_event.0.location.trading_region.probability', 'events.cargo_port_load_event.0.location.trading_region.source', 'events.cargo_port_load_event.0.location.trading_subregion.id', 'events.cargo_port_load_event.0.location.trading_subregion.label', 'events.cargo_port_load_event.0.location.trading_subregion.layer', 'events.cargo_port_load_event.0.location.trading_subregion.probability', 'events.cargo_port_load_event.0.location.trading_subregion.source', 'events.cargo_port_load_event.0.pos.0', 'events.cargo_port_load_event.0.pos.1', 'events.cargo_port_load_event.0.probability', 'events.cargo_port_load_event.0.start_timestamp', 'events.cargo_port_unload_event.0.end_timestamp', 'events.cargo_port_unload_event.0.event_type', 'events.cargo_port_unload_event.0.location.country.id', 'events.cargo_port_unload_event.0.location.country.label', 'events.cargo_port_unload_event.0.location.country.layer', 'events.cargo_port_unload_event.0.location.country.probability', 'events.cargo_port_unload_event.0.location.country.source', 'events.cargo_port_unload_event.0.location.port.id', 'events.cargo_port_unload_event.0.location.port.label', 'events.cargo_port_unload_event.0.location.port.layer', 'events.cargo_port_unload_event.0.location.port.probability', 'events.cargo_port_unload_event.0.location.port.source', 'events.cargo_port_unload_event.0.location.region.id', 'events.cargo_port_unload_event.0.location.region.label', 'events.cargo_port_unload_event.0.location.region.layer', 'events.cargo_port_unload_event.0.location.region.probability', 'events.cargo_port_unload_event.0.location.region.source', 'events.cargo_port_unload_event.0.location.shipping_region.id', 'events.cargo_port_unload_event.0.location.shipping_region.label', 'events.cargo_port_unload_event.0.location.shipping_region.layer', 'events.cargo_port_unload_event.0.location.shipping_region.probability', 'events.cargo_port_unload_event.0.location.shipping_region.source', 'events.cargo_port_unload_event.0.location.sts_zone.id', 'events.cargo_port_unload_event.0.location.sts_zone.label', 'events.cargo_port_unload_event.0.location.sts_zone.layer', 'events.cargo_port_unload_event.0.location.sts_zone.probability', 
'events.cargo_port_unload_event.0.location.sts_zone.source', 'events.cargo_port_unload_event.0.location.terminal.id', 'events.cargo_port_unload_event.0.location.terminal.label', 'events.cargo_port_unload_event.0.location.terminal.layer', 'events.cargo_port_unload_event.0.location.terminal.probability', 'events.cargo_port_unload_event.0.location.terminal.source', 'events.cargo_port_unload_event.0.location.trading_block.id', 'events.cargo_port_unload_event.0.location.trading_block.label', 'events.cargo_port_unload_event.0.location.trading_block.layer', 'events.cargo_port_unload_event.0.location.trading_block.probability', 'events.cargo_port_unload_event.0.location.trading_block.source', 'events.cargo_port_unload_event.0.location.trading_region.id', 'events.cargo_port_unload_event.0.location.trading_region.label', 'events.cargo_port_unload_event.0.location.trading_region.layer', 'events.cargo_port_unload_event.0.location.trading_region.probability', 'events.cargo_port_unload_event.0.location.trading_region.source', 'events.cargo_port_unload_event.0.location.trading_subregion.id', 'events.cargo_port_unload_event.0.location.trading_subregion.label', 'events.cargo_port_unload_event.0.location.trading_subregion.layer', 'events.cargo_port_unload_event.0.location.trading_subregion.probability', 'events.cargo_port_unload_event.0.location.trading_subregion.source', 'events.cargo_port_unload_event.0.pos.0', 'events.cargo_port_unload_event.0.pos.1', 'events.cargo_port_unload_event.0.probability', 'events.cargo_port_unload_event.0.start_timestamp', 'events.cargo_storage_event.0.end_timestamp', 'events.cargo_storage_event.0.event_type', 'events.cargo_storage_event.0.location.country.id', 'events.cargo_storage_event.0.location.country.label', 'events.cargo_storage_event.0.location.country.layer', 'events.cargo_storage_event.0.location.country.probability', 'events.cargo_storage_event.0.location.country.source', 'events.cargo_storage_event.0.location.region.id', 'events.cargo_storage_event.0.location.region.label', 'events.cargo_storage_event.0.location.region.layer', 'events.cargo_storage_event.0.location.region.probability', 'events.cargo_storage_event.0.location.region.source', 'events.cargo_storage_event.0.location.shipping_region.id', 'events.cargo_storage_event.0.location.shipping_region.label', 'events.cargo_storage_event.0.location.shipping_region.layer', 'events.cargo_storage_event.0.location.shipping_region.probability', 'events.cargo_storage_event.0.location.shipping_region.source', 'events.cargo_storage_event.0.location.trading_block.id', 'events.cargo_storage_event.0.location.trading_block.label', 'events.cargo_storage_event.0.location.trading_block.layer', 'events.cargo_storage_event.0.location.trading_block.probability', 'events.cargo_storage_event.0.location.trading_block.source', 'events.cargo_storage_event.0.location.trading_region.id', 'events.cargo_storage_event.0.location.trading_region.label', 'events.cargo_storage_event.0.location.trading_region.layer', 'events.cargo_storage_event.0.location.trading_region.probability', 'events.cargo_storage_event.0.location.trading_region.source', 'events.cargo_storage_event.0.location.trading_subregion.id', 'events.cargo_storage_event.0.location.trading_subregion.label', 'events.cargo_storage_event.0.location.trading_subregion.layer', 'events.cargo_storage_event.0.location.trading_subregion.probability', 'events.cargo_storage_event.0.location.trading_subregion.source', 'events.cargo_storage_event.0.pos.0', 'events.cargo_storage_event.0.pos.1', 
'events.cargo_storage_event.0.start_timestamp', 'events.cargo_storage_event.0.vessel_id', 'events.cargo_sts_event.0.end_timestamp', 'events.cargo_sts_event.0.event_type', 'events.cargo_sts_event.0.from_vessel_id', 'events.cargo_sts_event.0.from_vessel_name', 'events.cargo_sts_event.0.location.country.id', 'events.cargo_sts_event.0.location.country.label', 'events.cargo_sts_event.0.location.country.layer', 'events.cargo_sts_event.0.location.country.probability', 'events.cargo_sts_event.0.location.country.source', 'events.cargo_sts_event.0.location.port.id', 'events.cargo_sts_event.0.location.port.label', 'events.cargo_sts_event.0.location.port.layer', 'events.cargo_sts_event.0.location.port.probability', 'events.cargo_sts_event.0.location.port.source', 'events.cargo_sts_event.0.location.region.id', 'events.cargo_sts_event.0.location.region.label', 'events.cargo_sts_event.0.location.region.layer', 'events.cargo_sts_event.0.location.region.probability', 'events.cargo_sts_event.0.location.region.source', 'events.cargo_sts_event.0.location.shipping_region.id', 'events.cargo_sts_event.0.location.shipping_region.label', 'events.cargo_sts_event.0.location.shipping_region.layer', 'events.cargo_sts_event.0.location.shipping_region.probability', 'events.cargo_sts_event.0.location.shipping_region.source', 'events.cargo_sts_event.0.location.sts_zone.id', 'events.cargo_sts_event.0.location.sts_zone.label', 'events.cargo_sts_event.0.location.sts_zone.layer', 'events.cargo_sts_event.0.location.sts_zone.probability', 'events.cargo_sts_event.0.location.sts_zone.source', 'events.cargo_sts_event.0.location.trading_block.id', 'events.cargo_sts_event.0.location.trading_block.label', 'events.cargo_sts_event.0.location.trading_block.layer', 'events.cargo_sts_event.0.location.trading_block.probability', 'events.cargo_sts_event.0.location.trading_block.source', 'events.cargo_sts_event.0.location.trading_region.id', 'events.cargo_sts_event.0.location.trading_region.label', 'events.cargo_sts_event.0.location.trading_region.layer', 'events.cargo_sts_event.0.location.trading_region.probability', 'events.cargo_sts_event.0.location.trading_region.source', 'events.cargo_sts_event.0.location.trading_subregion.id', 'events.cargo_sts_event.0.location.trading_subregion.label', 'events.cargo_sts_event.0.location.trading_subregion.layer', 'events.cargo_sts_event.0.location.trading_subregion.probability', 'events.cargo_sts_event.0.location.trading_subregion.source', 'events.cargo_sts_event.0.pos.0', 'events.cargo_sts_event.0.pos.1', 'events.cargo_sts_event.0.start_timestamp', 'events.cargo_sts_event.0.to_vessel_id', 'events.cargo_sts_event.0.to_vessel_name', 'events.cargo_sts_event.1.end_timestamp', 'events.cargo_sts_event.1.event_type', 'events.cargo_sts_event.1.from_vessel_id', 'events.cargo_sts_event.1.from_vessel_name', 'events.cargo_sts_event.1.location.country.id', 'events.cargo_sts_event.1.location.country.label', 'events.cargo_sts_event.1.location.country.layer', 'events.cargo_sts_event.1.location.country.probability', 'events.cargo_sts_event.1.location.country.source', 'events.cargo_sts_event.1.location.region.id', 'events.cargo_sts_event.1.location.region.label', 'events.cargo_sts_event.1.location.region.layer', 'events.cargo_sts_event.1.location.region.probability', 'events.cargo_sts_event.1.location.region.source', 'events.cargo_sts_event.1.location.shipping_region.id', 'events.cargo_sts_event.1.location.shipping_region.label', 'events.cargo_sts_event.1.location.shipping_region.layer', 
'events.cargo_sts_event.1.location.shipping_region.probability', 'events.cargo_sts_event.1.location.shipping_region.source', 'events.cargo_sts_event.1.location.sts_zone.id', 'events.cargo_sts_event.1.location.sts_zone.label', 'events.cargo_sts_event.1.location.sts_zone.layer', 'events.cargo_sts_event.1.location.sts_zone.probability', 'events.cargo_sts_event.1.location.sts_zone.source', 'events.cargo_sts_event.1.location.trading_block.id', 'events.cargo_sts_event.1.location.trading_block.label', 'events.cargo_sts_event.1.location.trading_block.layer', 'events.cargo_sts_event.1.location.trading_block.probability', 'events.cargo_sts_event.1.location.trading_block.source', 'events.cargo_sts_event.1.location.trading_region.id', 'events.cargo_sts_event.1.location.trading_region.label', 'events.cargo_sts_event.1.location.trading_region.layer', 'events.cargo_sts_event.1.location.trading_region.probability', 'events.cargo_sts_event.1.location.trading_region.source', 'events.cargo_sts_event.1.location.trading_subregion.id', 'events.cargo_sts_event.1.location.trading_subregion.label', 'events.cargo_sts_event.1.location.trading_subregion.layer', 'events.cargo_sts_event.1.location.trading_subregion.probability', 'events.cargo_sts_event.1.location.trading_subregion.source', 'events.cargo_sts_event.1.pos.0', 'events.cargo_sts_event.1.pos.1', 'events.cargo_sts_event.1.start_timestamp', 'events.cargo_sts_event.1.to_vessel_id', 'events.cargo_sts_event.1.to_vessel_name', 'product.category.id', 'product.category.label', 'product.category.layer', 'product.category.probability', 'product.category.source', 'product.grade.id', 'product.grade.label', 'product.grade.layer', 'product.grade.probability', 'product.grade.source', 'product.group.id', 'product.group.label', 'product.group.layer', 'product.group.probability', 'product.group.source', 'product.group_product.id', 'product.group_product.label', 'product.group_product.layer', 'product.group_product.probability', 'product.group_product.source', 'quantity', 'status', 'vessels.0.corporate_entities.charterer.id', 'vessels.0.corporate_entities.charterer.label', 'vessels.0.corporate_entities.charterer.layer', 'vessels.0.corporate_entities.charterer.probability', 'vessels.0.corporate_entities.charterer.source', 'vessels.0.corporate_entities.effective_controller.id', 'vessels.0.corporate_entities.effective_controller.label', 'vessels.0.corporate_entities.effective_controller.layer', 'vessels.0.corporate_entities.effective_controller.probability', 'vessels.0.corporate_entities.effective_controller.source', 'vessels.0.corporate_entities.time_charterer.end_timestamp', 'vessels.0.corporate_entities.time_charterer.id', 'vessels.0.corporate_entities.time_charterer.label', 'vessels.0.corporate_entities.time_charterer.layer', 'vessels.0.corporate_entities.time_charterer.probability', 'vessels.0.corporate_entities.time_charterer.source', 'vessels.0.corporate_entities.time_charterer.start_timestamp', 'vessels.0.cubic_capacity', 'vessels.0.dwt', 'vessels.0.end_timestamp', 'vessels.0.fixture_fulfilled', 'vessels.0.fixture_id', 'vessels.0.id', 'vessels.0.imo', 'vessels.0.mmsi', 'vessels.0.name', 'vessels.0.start_timestamp', 'vessels.0.status', 'vessels.0.tags.0.end_timestamp', 'vessels.0.tags.0.start_timestamp', 'vessels.0.tags.0.tag', 'vessels.0.vessel_class', 'vessels.0.voyage_id', 'vessels.1.corporate_entities.charterer.id', 'vessels.1.corporate_entities.charterer.label', 'vessels.1.corporate_entities.charterer.layer', 'vessels.1.corporate_entities.charterer.probability', 
'vessels.1.corporate_entities.charterer.source', 'vessels.1.corporate_entities.effective_controller.id', 'vessels.1.corporate_entities.effective_controller.label', 'vessels.1.corporate_entities.effective_controller.layer', 'vessels.1.corporate_entities.effective_controller.probability', 'vessels.1.corporate_entities.effective_controller.source', 'vessels.1.corporate_entities.time_charterer.end_timestamp', 'vessels.1.corporate_entities.time_charterer.id', 'vessels.1.corporate_entities.time_charterer.label', 'vessels.1.corporate_entities.time_charterer.layer', 'vessels.1.corporate_entities.time_charterer.probability', 'vessels.1.corporate_entities.time_charterer.source', 'vessels.1.corporate_entities.time_charterer.start_timestamp', 'vessels.1.cubic_capacity', 'vessels.1.dwt', 'vessels.1.end_timestamp', 'vessels.1.fixture_fulfilled', 'vessels.1.id', 'vessels.1.imo', 'vessels.1.mmsi', 'vessels.1.name', 'vessels.1.start_timestamp', 'vessels.1.status', 'vessels.1.tags.0.end_timestamp', 'vessels.1.tags.0.start_timestamp', 'vessels.1.tags.0.tag', 'vessels.1.vessel_class', 'vessels.1.voyage_id', 'vessels.2.corporate_entities.charterer.id', 'vessels.2.corporate_entities.charterer.label', 'vessels.2.corporate_entities.charterer.layer', 'vessels.2.corporate_entities.charterer.probability', 'vessels.2.corporate_entities.charterer.source', 'vessels.2.corporate_entities.effective_controller.id', 'vessels.2.corporate_entities.effective_controller.label', 'vessels.2.corporate_entities.effective_controller.layer', 'vessels.2.corporate_entities.effective_controller.probability', 'vessels.2.corporate_entities.effective_controller.source', 'vessels.2.corporate_entities.time_charterer.end_timestamp', 'vessels.2.corporate_entities.time_charterer.id', 'vessels.2.corporate_entities.time_charterer.label', 'vessels.2.corporate_entities.time_charterer.layer', 'vessels.2.corporate_entities.time_charterer.probability', 'vessels.2.corporate_entities.time_charterer.source', 'vessels.2.corporate_entities.time_charterer.start_timestamp', 'vessels.2.cubic_capacity', 'vessels.2.dwt', 'vessels.2.end_timestamp', 'vessels.2.id', 'vessels.2.imo', 'vessels.2.mmsi', 'vessels.2.name', 'vessels.2.start_timestamp', 'vessels.2.status', 'vessels.2.tags.0.start_timestamp', 'vessels.2.tags.0.tag', 'vessels.2.vessel_class', 'vessels.2.voyage_id', 'vessels.3.corporate_entities.effective_controller.id', 'vessels.3.corporate_entities.effective_controller.label', 'vessels.3.corporate_entities.effective_controller.layer', 'vessels.3.corporate_entities.effective_controller.probability', 'vessels.3.corporate_entities.effective_controller.source', 'vessels.3.cubic_capacity', 'vessels.3.dwt', 'vessels.3.id', 'vessels.3.imo', 'vessels.3.mmsi', 'vessels.3.name', 'vessels.3.start_timestamp', 'vessels.3.status', 'vessels.3.vessel_class', 'vessels.3.voyage_id', 'parent_ids.0.id', 'parent_ids.0.splinter_timestamp', 'parent_ids.1.id', 'parent_ids.1.splinter_timestamp', ]","title":"Cargo Movements"},{"location":"endpoints/cargo_movements/#notes","text":"A cargo movement is a complicated, nested structure. Between it's point of loading and discharge, a cargo movement may be carried by N or more vessels, with N-1 associated STS events. Each of these N vessels could have an associated effective controller, charterer, time charterer... etc. In order to represent a cargo movement as a flat (not nested) record in a dataframe, the sdk flattens the cargo movement, generating many columns in the process. The columns are logically named. 
Let's say that a cargo is transferred between 4 vessels en route from a load in Rotterdam to a discharge in New York. This is represented as 1 cargo_port_load_event , followed by 3 cargo_sts_event s, and finally 1 cargo_port_unload_event . In this example, the name of the 1st vessel is found in the vessels.0.name column (we're using zero-based numbering indexes). Likewise, the imo of the second vessel is found in the vessels.1.imo column. To find the name of the country in which the second STS event occurred, we'd use the events.cargo_sts_event.1.location.country.layer column. Similarly, to find out when the first vessel started loading the cargo from Rotterdam, we'd use the events.cargo_port_load_event.0.start_timestamp column. By default, the columns returned are something along the lines of: DEFAULT_COLUMNS = [ 'events.cargo_port_load_event.0.location.port.label', 'events.cargo_port_unload_event.0.location.port.label', 'product.group.label', 'product.grade.label', 'quantity', 'vessels.0.name', 'events.cargo_port_load_event.0.end_timestamp', 'events.cargo_port_unload_event.0.start_timestamp', ] The exact default columns used can be found at cargo_movements.DEFAULT_COLUMNS A near-complete list of columns is given below [ 'cargo_movement_id', 'events.cargo_fso_load_event.0.end_timestamp', 'events.cargo_fso_load_event.0.event_type', 'events.cargo_fso_load_event.0.fso_vessel_id', 'events.cargo_fso_load_event.0.fso_vessel_name', 'events.cargo_fso_load_event.0.location.country.id', 'events.cargo_fso_load_event.0.location.country.label', 'events.cargo_fso_load_event.0.location.country.layer', 'events.cargo_fso_load_event.0.location.country.probability', 'events.cargo_fso_load_event.0.location.country.source', 'events.cargo_fso_load_event.0.location.region.id', 'events.cargo_fso_load_event.0.location.region.label', 'events.cargo_fso_load_event.0.location.region.layer', 'events.cargo_fso_load_event.0.location.region.probability', 'events.cargo_fso_load_event.0.location.region.source', 'events.cargo_fso_load_event.0.location.shipping_region.id', 'events.cargo_fso_load_event.0.location.shipping_region.label', 'events.cargo_fso_load_event.0.location.shipping_region.layer', 'events.cargo_fso_load_event.0.location.shipping_region.probability', 'events.cargo_fso_load_event.0.location.shipping_region.source', 'events.cargo_fso_load_event.0.location.sts_zone.id', 'events.cargo_fso_load_event.0.location.sts_zone.label', 'events.cargo_fso_load_event.0.location.sts_zone.layer', 'events.cargo_fso_load_event.0.location.sts_zone.probability', 'events.cargo_fso_load_event.0.location.sts_zone.source', 'events.cargo_fso_load_event.0.location.trading_block.id', 'events.cargo_fso_load_event.0.location.trading_block.label', 'events.cargo_fso_load_event.0.location.trading_block.layer', 'events.cargo_fso_load_event.0.location.trading_block.probability', 'events.cargo_fso_load_event.0.location.trading_block.source', 'events.cargo_fso_load_event.0.location.trading_region.id', 'events.cargo_fso_load_event.0.location.trading_region.label', 'events.cargo_fso_load_event.0.location.trading_region.layer', 'events.cargo_fso_load_event.0.location.trading_region.probability', 'events.cargo_fso_load_event.0.location.trading_region.source', 'events.cargo_fso_load_event.0.location.trading_subregion.id', 'events.cargo_fso_load_event.0.location.trading_subregion.label', 'events.cargo_fso_load_event.0.location.trading_subregion.layer', 'events.cargo_fso_load_event.0.location.trading_subregion.probability',
'events.cargo_fso_load_event.0.location.trading_subregion.source', 'events.cargo_fso_load_event.0.pos.0', 'events.cargo_fso_load_event.0.pos.1', 'events.cargo_fso_load_event.0.probability', 'events.cargo_fso_load_event.0.start_timestamp', 'events.cargo_fso_load_event.0.to_vessel_id', 'events.cargo_fso_load_event.0.to_vessel_name', 'events.cargo_fso_unload_event.0.end_timestamp', 'events.cargo_fso_unload_event.0.event_type', 'events.cargo_fso_unload_event.0.from_vessel_id', 'events.cargo_fso_unload_event.0.from_vessel_name', 'events.cargo_fso_unload_event.0.fso_vessel_id', 'events.cargo_fso_unload_event.0.fso_vessel_name', 'events.cargo_fso_unload_event.0.location.country.id', 'events.cargo_fso_unload_event.0.location.country.label', 'events.cargo_fso_unload_event.0.location.country.layer', 'events.cargo_fso_unload_event.0.location.country.probability', 'events.cargo_fso_unload_event.0.location.country.source', 'events.cargo_fso_unload_event.0.location.region.id', 'events.cargo_fso_unload_event.0.location.region.label', 'events.cargo_fso_unload_event.0.location.region.layer', 'events.cargo_fso_unload_event.0.location.region.probability', 'events.cargo_fso_unload_event.0.location.region.source', 'events.cargo_fso_unload_event.0.location.shipping_region.id', 'events.cargo_fso_unload_event.0.location.shipping_region.label', 'events.cargo_fso_unload_event.0.location.shipping_region.layer', 'events.cargo_fso_unload_event.0.location.shipping_region.probability', 'events.cargo_fso_unload_event.0.location.shipping_region.source', 'events.cargo_fso_unload_event.0.location.sts_zone.id', 'events.cargo_fso_unload_event.0.location.sts_zone.label', 'events.cargo_fso_unload_event.0.location.sts_zone.layer', 'events.cargo_fso_unload_event.0.location.sts_zone.probability', 'events.cargo_fso_unload_event.0.location.sts_zone.source', 'events.cargo_fso_unload_event.0.location.trading_block.id', 'events.cargo_fso_unload_event.0.location.trading_block.label', 'events.cargo_fso_unload_event.0.location.trading_block.layer', 'events.cargo_fso_unload_event.0.location.trading_block.probability', 'events.cargo_fso_unload_event.0.location.trading_block.source', 'events.cargo_fso_unload_event.0.location.trading_region.id', 'events.cargo_fso_unload_event.0.location.trading_region.label', 'events.cargo_fso_unload_event.0.location.trading_region.layer', 'events.cargo_fso_unload_event.0.location.trading_region.probability', 'events.cargo_fso_unload_event.0.location.trading_region.source', 'events.cargo_fso_unload_event.0.location.trading_subregion.id', 'events.cargo_fso_unload_event.0.location.trading_subregion.label', 'events.cargo_fso_unload_event.0.location.trading_subregion.layer', 'events.cargo_fso_unload_event.0.location.trading_subregion.probability', 'events.cargo_fso_unload_event.0.location.trading_subregion.source', 'events.cargo_fso_unload_event.0.pos.0', 'events.cargo_fso_unload_event.0.pos.1', 'events.cargo_fso_unload_event.0.probability', 'events.cargo_fso_unload_event.0.start_timestamp', 'events.cargo_port_load_event.0.end_timestamp', 'events.cargo_port_load_event.0.event_type', 'events.cargo_port_load_event.0.location.country.id', 'events.cargo_port_load_event.0.location.country.label', 'events.cargo_port_load_event.0.location.country.layer', 'events.cargo_port_load_event.0.location.country.probability', 'events.cargo_port_load_event.0.location.country.source', 'events.cargo_port_load_event.0.location.port.id', 'events.cargo_port_load_event.0.location.port.label', 
'events.cargo_port_load_event.0.location.port.layer', 'events.cargo_port_load_event.0.location.port.probability', 'events.cargo_port_load_event.0.location.port.source', 'events.cargo_port_load_event.0.location.region.id', 'events.cargo_port_load_event.0.location.region.label', 'events.cargo_port_load_event.0.location.region.layer', 'events.cargo_port_load_event.0.location.region.probability', 'events.cargo_port_load_event.0.location.region.source', 'events.cargo_port_load_event.0.location.shipping_region.id', 'events.cargo_port_load_event.0.location.shipping_region.label', 'events.cargo_port_load_event.0.location.shipping_region.layer', 'events.cargo_port_load_event.0.location.shipping_region.probability', 'events.cargo_port_load_event.0.location.shipping_region.source', 'events.cargo_port_load_event.0.location.terminal.id', 'events.cargo_port_load_event.0.location.terminal.label', 'events.cargo_port_load_event.0.location.terminal.layer', 'events.cargo_port_load_event.0.location.terminal.probability', 'events.cargo_port_load_event.0.location.terminal.source', 'events.cargo_port_load_event.0.location.trading_block.id', 'events.cargo_port_load_event.0.location.trading_block.label', 'events.cargo_port_load_event.0.location.trading_block.layer', 'events.cargo_port_load_event.0.location.trading_block.probability', 'events.cargo_port_load_event.0.location.trading_block.source', 'events.cargo_port_load_event.0.location.trading_region.id', 'events.cargo_port_load_event.0.location.trading_region.label', 'events.cargo_port_load_event.0.location.trading_region.layer', 'events.cargo_port_load_event.0.location.trading_region.probability', 'events.cargo_port_load_event.0.location.trading_region.source', 'events.cargo_port_load_event.0.location.trading_subregion.id', 'events.cargo_port_load_event.0.location.trading_subregion.label', 'events.cargo_port_load_event.0.location.trading_subregion.layer', 'events.cargo_port_load_event.0.location.trading_subregion.probability', 'events.cargo_port_load_event.0.location.trading_subregion.source', 'events.cargo_port_load_event.0.pos.0', 'events.cargo_port_load_event.0.pos.1', 'events.cargo_port_load_event.0.probability', 'events.cargo_port_load_event.0.start_timestamp', 'events.cargo_port_unload_event.0.end_timestamp', 'events.cargo_port_unload_event.0.event_type', 'events.cargo_port_unload_event.0.location.country.id', 'events.cargo_port_unload_event.0.location.country.label', 'events.cargo_port_unload_event.0.location.country.layer', 'events.cargo_port_unload_event.0.location.country.probability', 'events.cargo_port_unload_event.0.location.country.source', 'events.cargo_port_unload_event.0.location.port.id', 'events.cargo_port_unload_event.0.location.port.label', 'events.cargo_port_unload_event.0.location.port.layer', 'events.cargo_port_unload_event.0.location.port.probability', 'events.cargo_port_unload_event.0.location.port.source', 'events.cargo_port_unload_event.0.location.region.id', 'events.cargo_port_unload_event.0.location.region.label', 'events.cargo_port_unload_event.0.location.region.layer', 'events.cargo_port_unload_event.0.location.region.probability', 'events.cargo_port_unload_event.0.location.region.source', 'events.cargo_port_unload_event.0.location.shipping_region.id', 'events.cargo_port_unload_event.0.location.shipping_region.label', 'events.cargo_port_unload_event.0.location.shipping_region.layer', 'events.cargo_port_unload_event.0.location.shipping_region.probability', 'events.cargo_port_unload_event.0.location.shipping_region.source', 
'events.cargo_port_unload_event.0.location.sts_zone.id', 'events.cargo_port_unload_event.0.location.sts_zone.label', 'events.cargo_port_unload_event.0.location.sts_zone.layer', 'events.cargo_port_unload_event.0.location.sts_zone.probability', 'events.cargo_port_unload_event.0.location.sts_zone.source', 'events.cargo_port_unload_event.0.location.terminal.id', 'events.cargo_port_unload_event.0.location.terminal.label', 'events.cargo_port_unload_event.0.location.terminal.layer', 'events.cargo_port_unload_event.0.location.terminal.probability', 'events.cargo_port_unload_event.0.location.terminal.source', 'events.cargo_port_unload_event.0.location.trading_block.id', 'events.cargo_port_unload_event.0.location.trading_block.label', 'events.cargo_port_unload_event.0.location.trading_block.layer', 'events.cargo_port_unload_event.0.location.trading_block.probability', 'events.cargo_port_unload_event.0.location.trading_block.source', 'events.cargo_port_unload_event.0.location.trading_region.id', 'events.cargo_port_unload_event.0.location.trading_region.label', 'events.cargo_port_unload_event.0.location.trading_region.layer', 'events.cargo_port_unload_event.0.location.trading_region.probability', 'events.cargo_port_unload_event.0.location.trading_region.source', 'events.cargo_port_unload_event.0.location.trading_subregion.id', 'events.cargo_port_unload_event.0.location.trading_subregion.label', 'events.cargo_port_unload_event.0.location.trading_subregion.layer', 'events.cargo_port_unload_event.0.location.trading_subregion.probability', 'events.cargo_port_unload_event.0.location.trading_subregion.source', 'events.cargo_port_unload_event.0.pos.0', 'events.cargo_port_unload_event.0.pos.1', 'events.cargo_port_unload_event.0.probability', 'events.cargo_port_unload_event.0.start_timestamp', 'events.cargo_storage_event.0.end_timestamp', 'events.cargo_storage_event.0.event_type', 'events.cargo_storage_event.0.location.country.id', 'events.cargo_storage_event.0.location.country.label', 'events.cargo_storage_event.0.location.country.layer', 'events.cargo_storage_event.0.location.country.probability', 'events.cargo_storage_event.0.location.country.source', 'events.cargo_storage_event.0.location.region.id', 'events.cargo_storage_event.0.location.region.label', 'events.cargo_storage_event.0.location.region.layer', 'events.cargo_storage_event.0.location.region.probability', 'events.cargo_storage_event.0.location.region.source', 'events.cargo_storage_event.0.location.shipping_region.id', 'events.cargo_storage_event.0.location.shipping_region.label', 'events.cargo_storage_event.0.location.shipping_region.layer', 'events.cargo_storage_event.0.location.shipping_region.probability', 'events.cargo_storage_event.0.location.shipping_region.source', 'events.cargo_storage_event.0.location.trading_block.id', 'events.cargo_storage_event.0.location.trading_block.label', 'events.cargo_storage_event.0.location.trading_block.layer', 'events.cargo_storage_event.0.location.trading_block.probability', 'events.cargo_storage_event.0.location.trading_block.source', 'events.cargo_storage_event.0.location.trading_region.id', 'events.cargo_storage_event.0.location.trading_region.label', 'events.cargo_storage_event.0.location.trading_region.layer', 'events.cargo_storage_event.0.location.trading_region.probability', 'events.cargo_storage_event.0.location.trading_region.source', 'events.cargo_storage_event.0.location.trading_subregion.id', 'events.cargo_storage_event.0.location.trading_subregion.label', 
'events.cargo_storage_event.0.location.trading_subregion.layer', 'events.cargo_storage_event.0.location.trading_subregion.probability', 'events.cargo_storage_event.0.location.trading_subregion.source', 'events.cargo_storage_event.0.pos.0', 'events.cargo_storage_event.0.pos.1', 'events.cargo_storage_event.0.start_timestamp', 'events.cargo_storage_event.0.vessel_id', 'events.cargo_sts_event.0.end_timestamp', 'events.cargo_sts_event.0.event_type', 'events.cargo_sts_event.0.from_vessel_id', 'events.cargo_sts_event.0.from_vessel_name', 'events.cargo_sts_event.0.location.country.id', 'events.cargo_sts_event.0.location.country.label', 'events.cargo_sts_event.0.location.country.layer', 'events.cargo_sts_event.0.location.country.probability', 'events.cargo_sts_event.0.location.country.source', 'events.cargo_sts_event.0.location.port.id', 'events.cargo_sts_event.0.location.port.label', 'events.cargo_sts_event.0.location.port.layer', 'events.cargo_sts_event.0.location.port.probability', 'events.cargo_sts_event.0.location.port.source', 'events.cargo_sts_event.0.location.region.id', 'events.cargo_sts_event.0.location.region.label', 'events.cargo_sts_event.0.location.region.layer', 'events.cargo_sts_event.0.location.region.probability', 'events.cargo_sts_event.0.location.region.source', 'events.cargo_sts_event.0.location.shipping_region.id', 'events.cargo_sts_event.0.location.shipping_region.label', 'events.cargo_sts_event.0.location.shipping_region.layer', 'events.cargo_sts_event.0.location.shipping_region.probability', 'events.cargo_sts_event.0.location.shipping_region.source', 'events.cargo_sts_event.0.location.sts_zone.id', 'events.cargo_sts_event.0.location.sts_zone.label', 'events.cargo_sts_event.0.location.sts_zone.layer', 'events.cargo_sts_event.0.location.sts_zone.probability', 'events.cargo_sts_event.0.location.sts_zone.source', 'events.cargo_sts_event.0.location.trading_block.id', 'events.cargo_sts_event.0.location.trading_block.label', 'events.cargo_sts_event.0.location.trading_block.layer', 'events.cargo_sts_event.0.location.trading_block.probability', 'events.cargo_sts_event.0.location.trading_block.source', 'events.cargo_sts_event.0.location.trading_region.id', 'events.cargo_sts_event.0.location.trading_region.label', 'events.cargo_sts_event.0.location.trading_region.layer', 'events.cargo_sts_event.0.location.trading_region.probability', 'events.cargo_sts_event.0.location.trading_region.source', 'events.cargo_sts_event.0.location.trading_subregion.id', 'events.cargo_sts_event.0.location.trading_subregion.label', 'events.cargo_sts_event.0.location.trading_subregion.layer', 'events.cargo_sts_event.0.location.trading_subregion.probability', 'events.cargo_sts_event.0.location.trading_subregion.source', 'events.cargo_sts_event.0.pos.0', 'events.cargo_sts_event.0.pos.1', 'events.cargo_sts_event.0.start_timestamp', 'events.cargo_sts_event.0.to_vessel_id', 'events.cargo_sts_event.0.to_vessel_name', 'events.cargo_sts_event.1.end_timestamp', 'events.cargo_sts_event.1.event_type', 'events.cargo_sts_event.1.from_vessel_id', 'events.cargo_sts_event.1.from_vessel_name', 'events.cargo_sts_event.1.location.country.id', 'events.cargo_sts_event.1.location.country.label', 'events.cargo_sts_event.1.location.country.layer', 'events.cargo_sts_event.1.location.country.probability', 'events.cargo_sts_event.1.location.country.source', 'events.cargo_sts_event.1.location.region.id', 'events.cargo_sts_event.1.location.region.label', 'events.cargo_sts_event.1.location.region.layer', 
'events.cargo_sts_event.1.location.region.probability', 'events.cargo_sts_event.1.location.region.source', 'events.cargo_sts_event.1.location.shipping_region.id', 'events.cargo_sts_event.1.location.shipping_region.label', 'events.cargo_sts_event.1.location.shipping_region.layer', 'events.cargo_sts_event.1.location.shipping_region.probability', 'events.cargo_sts_event.1.location.shipping_region.source', 'events.cargo_sts_event.1.location.sts_zone.id', 'events.cargo_sts_event.1.location.sts_zone.label', 'events.cargo_sts_event.1.location.sts_zone.layer', 'events.cargo_sts_event.1.location.sts_zone.probability', 'events.cargo_sts_event.1.location.sts_zone.source', 'events.cargo_sts_event.1.location.trading_block.id', 'events.cargo_sts_event.1.location.trading_block.label', 'events.cargo_sts_event.1.location.trading_block.layer', 'events.cargo_sts_event.1.location.trading_block.probability', 'events.cargo_sts_event.1.location.trading_block.source', 'events.cargo_sts_event.1.location.trading_region.id', 'events.cargo_sts_event.1.location.trading_region.label', 'events.cargo_sts_event.1.location.trading_region.layer', 'events.cargo_sts_event.1.location.trading_region.probability', 'events.cargo_sts_event.1.location.trading_region.source', 'events.cargo_sts_event.1.location.trading_subregion.id', 'events.cargo_sts_event.1.location.trading_subregion.label', 'events.cargo_sts_event.1.location.trading_subregion.layer', 'events.cargo_sts_event.1.location.trading_subregion.probability', 'events.cargo_sts_event.1.location.trading_subregion.source', 'events.cargo_sts_event.1.pos.0', 'events.cargo_sts_event.1.pos.1', 'events.cargo_sts_event.1.start_timestamp', 'events.cargo_sts_event.1.to_vessel_id', 'events.cargo_sts_event.1.to_vessel_name', 'product.category.id', 'product.category.label', 'product.category.layer', 'product.category.probability', 'product.category.source', 'product.grade.id', 'product.grade.label', 'product.grade.layer', 'product.grade.probability', 'product.grade.source', 'product.group.id', 'product.group.label', 'product.group.layer', 'product.group.probability', 'product.group.source', 'product.group_product.id', 'product.group_product.label', 'product.group_product.layer', 'product.group_product.probability', 'product.group_product.source', 'quantity', 'status', 'vessels.0.corporate_entities.charterer.id', 'vessels.0.corporate_entities.charterer.label', 'vessels.0.corporate_entities.charterer.layer', 'vessels.0.corporate_entities.charterer.probability', 'vessels.0.corporate_entities.charterer.source', 'vessels.0.corporate_entities.effective_controller.id', 'vessels.0.corporate_entities.effective_controller.label', 'vessels.0.corporate_entities.effective_controller.layer', 'vessels.0.corporate_entities.effective_controller.probability', 'vessels.0.corporate_entities.effective_controller.source', 'vessels.0.corporate_entities.time_charterer.end_timestamp', 'vessels.0.corporate_entities.time_charterer.id', 'vessels.0.corporate_entities.time_charterer.label', 'vessels.0.corporate_entities.time_charterer.layer', 'vessels.0.corporate_entities.time_charterer.probability', 'vessels.0.corporate_entities.time_charterer.source', 'vessels.0.corporate_entities.time_charterer.start_timestamp', 'vessels.0.cubic_capacity', 'vessels.0.dwt', 'vessels.0.end_timestamp', 'vessels.0.fixture_fulfilled', 'vessels.0.fixture_id', 'vessels.0.id', 'vessels.0.imo', 'vessels.0.mmsi', 'vessels.0.name', 'vessels.0.start_timestamp', 'vessels.0.status', 'vessels.0.tags.0.end_timestamp', 
'vessels.0.tags.0.start_timestamp', 'vessels.0.tags.0.tag', 'vessels.0.vessel_class', 'vessels.0.voyage_id', 'vessels.1.corporate_entities.charterer.id', 'vessels.1.corporate_entities.charterer.label', 'vessels.1.corporate_entities.charterer.layer', 'vessels.1.corporate_entities.charterer.probability', 'vessels.1.corporate_entities.charterer.source', 'vessels.1.corporate_entities.effective_controller.id', 'vessels.1.corporate_entities.effective_controller.label', 'vessels.1.corporate_entities.effective_controller.layer', 'vessels.1.corporate_entities.effective_controller.probability', 'vessels.1.corporate_entities.effective_controller.source', 'vessels.1.corporate_entities.time_charterer.end_timestamp', 'vessels.1.corporate_entities.time_charterer.id', 'vessels.1.corporate_entities.time_charterer.label', 'vessels.1.corporate_entities.time_charterer.layer', 'vessels.1.corporate_entities.time_charterer.probability', 'vessels.1.corporate_entities.time_charterer.source', 'vessels.1.corporate_entities.time_charterer.start_timestamp', 'vessels.1.cubic_capacity', 'vessels.1.dwt', 'vessels.1.end_timestamp', 'vessels.1.fixture_fulfilled', 'vessels.1.id', 'vessels.1.imo', 'vessels.1.mmsi', 'vessels.1.name', 'vessels.1.start_timestamp', 'vessels.1.status', 'vessels.1.tags.0.end_timestamp', 'vessels.1.tags.0.start_timestamp', 'vessels.1.tags.0.tag', 'vessels.1.vessel_class', 'vessels.1.voyage_id', 'vessels.2.corporate_entities.charterer.id', 'vessels.2.corporate_entities.charterer.label', 'vessels.2.corporate_entities.charterer.layer', 'vessels.2.corporate_entities.charterer.probability', 'vessels.2.corporate_entities.charterer.source', 'vessels.2.corporate_entities.effective_controller.id', 'vessels.2.corporate_entities.effective_controller.label', 'vessels.2.corporate_entities.effective_controller.layer', 'vessels.2.corporate_entities.effective_controller.probability', 'vessels.2.corporate_entities.effective_controller.source', 'vessels.2.corporate_entities.time_charterer.end_timestamp', 'vessels.2.corporate_entities.time_charterer.id', 'vessels.2.corporate_entities.time_charterer.label', 'vessels.2.corporate_entities.time_charterer.layer', 'vessels.2.corporate_entities.time_charterer.probability', 'vessels.2.corporate_entities.time_charterer.source', 'vessels.2.corporate_entities.time_charterer.start_timestamp', 'vessels.2.cubic_capacity', 'vessels.2.dwt', 'vessels.2.end_timestamp', 'vessels.2.id', 'vessels.2.imo', 'vessels.2.mmsi', 'vessels.2.name', 'vessels.2.start_timestamp', 'vessels.2.status', 'vessels.2.tags.0.start_timestamp', 'vessels.2.tags.0.tag', 'vessels.2.vessel_class', 'vessels.2.voyage_id', 'vessels.3.corporate_entities.effective_controller.id', 'vessels.3.corporate_entities.effective_controller.label', 'vessels.3.corporate_entities.effective_controller.layer', 'vessels.3.corporate_entities.effective_controller.probability', 'vessels.3.corporate_entities.effective_controller.source', 'vessels.3.cubic_capacity', 'vessels.3.dwt', 'vessels.3.id', 'vessels.3.imo', 'vessels.3.mmsi', 'vessels.3.name', 'vessels.3.start_timestamp', 'vessels.3.status', 'vessels.3.vessel_class', 'vessels.3.voyage_id', 'parent_ids.0.id', 'parent_ids.0.splinter_timestamp', 'parent_ids.1.id', 'parent_ids.1.splinter_timestamp', ]","title":"Notes"},{"location":"endpoints/cargo_timeseries/","text":"vortexasdk.endpoints.cargo_timeseries Try me out in your browser: CargoTimeSeries CargoTimeSeries(self) search CargoTimeSeries.search(self, filter_activity: str, timeseries_activity: str = None, timeseries_frequency: str 
= 'day', timeseries_unit: str = 'b', filter_time_min: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2019, 10, 1, 1, 0), filter_charterers: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_products: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_storage_locations: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, disable_geographic_exclusion_rules: bool = None, timeseries_activity_time_span_min: int = None, timeseries_activity_time_span_max: int = None) -> vortexasdk.endpoints.timeseries_result.TimeSeriesResult Find aggregate flows between regions, for various products, vessels, or corporations. Example questions that can be answered with this endpoint: How many Crude/Condensate barrels have been imported into China each day over the last year? How many tonnes of Fuel Oil has company X exported from the United States each week over the last 2 years? How have long-term Medium-Sour floating storage levels changed over time? Arguments filter_activity : Cargo movement activity on which to base the time filter. The endpoint only includes cargo movements that match this filter in the aggregations. Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'storing_state', 'storing_start', 'storing_end', 'transiting_state', 'oil_on_water_state']. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. filter_charterers : A charterer ID, or list of charterer IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_storage_locations : A geography ID, or list of geography IDs to filter on. filter_ship_to_ship_locations : A geography ID, or list of geography IDs to filter on. filter_waypoints : A geography ID, or list of geography IDs to filter on. disable_geographic_exclusion_rules : Determines the filter behaviour for cargo leaving and then re-entering the same geographic area, known in the industry as \"intra-movements\". timeseries_activity : The cargo movement activity we want to aggregate on. This param defaults to filter_activity if left blank.
For example, Let's say we want to aggregate the unloading timestamps of all cargo movements that loaded in 2019, then we'd use filter_time_min and filter_time_max to specify 1st Jan 2019 and 31st Dec 2019 respectively, we'd set filter_activity='loading_state' and timeseries_activity='unloading_state' to filter on loadings but aggregate on unloadings. filter_activity Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'storing_state', 'storing_start', 'storing_end', 'transiting_state']. timeseries_frequency : Frequency denoting the granularity of the time series. Must be one of ['day', 'week', 'doe_week', 'month', 'quarter', 'year'] timeseries_unit : A numeric metric to be calculated for each time bucket. Must be one of ['b', 'bpd', 't', 'tpd', 'c', 'cpd'], corresponding to barrels, barrels per day, metric tonnes, metric tonnes per day, cargo movement count, cargo movement count per day, respectively. timeseries_activity_time_span_min : The minimum amount of time in milliseconds accounted for in a time series activity. Can be used to request long-term floating storage. For example, to only return floating storage movements that occurred for more than 14 days enter timeseries_activity_time_span_min=1000 * 60 * 60 * 24 * 14 in conjunction with filter_activity='storing_state' . timeseries_activity_time_span_max : The maximum amount of time in milliseconds accounted for in a time series activity. Can be used to request short-term floating storage. For example, to only return floating storage movements that occurred for less than 14 days enter timeseries_activity_time_span_max=1000 * 60 * 60 * 24 * 14 in conjunction with filter_activity='storing_state' . Returns TimeSeriesResult Example What was the monthly average barrels per day of crude loaded from Rotterdam over the last year? >>> from vortexasdk import CargoTimeSeries, Geographies, Products >>> rotterdam = [g.id for g in Geographies().search(\"rotterdam\").to_list() if \"port\" in g.layer] >>> crude = [p.id for p in Products().search(\"crude\").to_list() if \"Crude\" == p.name] >>> search_result = CargoTimeSeries().search( ... timeseries_unit='bpd', ... timeseries_frequency='month', ... filter_origins=rotterdam, ... filter_products=crude, ... filter_activity='loading_state', ... filter_time_min=datetime(2018, 1, 1), ... filter_time_max=datetime(2018, 12, 31)) >>> df = search_result.to_df() Gives the following: key count value 0 2018-01-01T00:00:00.000Z 0.354839 458665 1 2018-02-01T00:00:00.000Z 0.75 45024 2 2018-03-01T00:00:00.000Z 0.0645161 35663.5 3 2018-04-01T00:00:00.000Z 0.878777 12345.2 4 2018-05-01T00:00:00.000Z 0.455932 9999.32 5 2018-06-01T00:00:00.000Z 0.777667 12234.8 6 2018-07-01T00:00:00.000Z 0.555097 987666 7 2018-08-01T00:00:00.000Z 0.290323 5318008.1 8 2018-09-01T00:00:00.000Z 0.0333333 686888.87 9 2018-10-01T00:00:00.000Z 0.354839 234344 10 2018-11-01T00:00:00.000Z 0.2345 111111 11 2018-12-01T00:00:00.000Z 0.123129 34344.5","title":"Cargo Time Series"},{"location":"endpoints/corporations/","text":"vortexasdk.endpoints.corporations Try me out in your browser: Corporations Corporations(self) Corporations Endpoint. load_all Corporations.load_all(self) -> vortexasdk.endpoints.corporations_result.CorporationsResult Load all corporations. 
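For instance, a minimal sketch (not an official example) of pulling the full corporations reference data into a DataFrame; note this fetches every corporation, so it may take a while, and the default columns are those described under to_df below: >>> from vortexasdk import Corporations >>> all_corporations = Corporations().load_all() >>> df = all_corporations.to_df()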
search Corporations.search(self, term: Union[str, List[str]] = None, exact_term_match: bool = False) -> vortexasdk.endpoints.corporations_result.CorporationsResult Find all Corporations matching given search terms. Arguments term : The corporation name(s) we're filtering on exact_term_match : Search on only exact term matches, or allow similar matches. e.g. When searching for \"COS\" with exact_term_match=False , then the SDK will yield corporations named ['COSCO', 'COSMO OIL'] etc. When exact_term_match=True , the SDK won't yield any results, because no corporations have the exact name \"COS\". Returns List of corporation matching term Examples Let's load all corporations >>> from vortexasdk import Corporations >>> df = Corporations().search().to_df() returns id name corporate_entity_type 0 04f418ee78c1e17744ad653e7815e8e28891ed9ba25a8427030e4478e5c00974 3J ['effective_controller'] 1 b6384cf17f1639a64bbff04cfd32257bf732a3a13e4b0532802a9ae84a36be34 5XJAPANESE ['effective_controller'] Let's find all corporations with 'do' in the name. >>> [x.name for x in Corporations().search(term=\"do\").to_list()] [...] Further Documentation VortexaAPI Corporation Reference reference Corporations.reference(self, id: str) -> Dict Perform a corporation lookup. Arguments id : Corporation ID to lookup Returns Corporation record matching the ID Further Documentation: VortexaAPI Corporation Reference Examples Corporations().reference(id='12345abcdef') # doctest: +SKIP vortexasdk.endpoints.corporations_result CorporationsResult CorporationsResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the Vessels endpoint. to_list CorporationsResult.to_list(self) -> List[vortexasdk.api.corporation.Corporation] Represent vessels as a list. to_df CorporationsResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent corporations as a pd.DataFrame . Arguments columns : The corporation features we want in the dataframe. Enter columns='all' to include all features. Defaults to columns = ['id', 'name', 'corporate_entity_type'] . 
Returns pd.DataFrame of corporations.","title":"Corporations"},{"location":"endpoints/destination_breakdown/","text":"vortexasdk.endpoints.destination_breakdown Try me out in your browser: DestinationBreakdown DestinationBreakdown(self) search DestinationBreakdown.search(self, breakdown_geography: str = 'country', breakdown_unit_average_basis: str = None, filter_activity: str = 'any_activity', breakdown_unit: str = 'b', disable_geographic_exclusion_rules: bool = None, breakdown_size: int = None, filter_time_min: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2019, 10, 1, 1, 0), filter_products: Union[str, List[str]] = None, filter_charterers: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_vessel_flags: Union[str, List[str]] = None, filter_vessel_ice_class: Union[str, List[str]] = None, filter_vessel_propulsion: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_storage_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_waypoints: Union[str, List[str]] = None, exclude_storage_locations: Union[str, List[str]] = None, exclude_ship_to_ship_locations: Union[str, List[str]] = None, exclude_vessel_ice_class: Union[str, List[str]] = None, exclude_vessel_propulsion: Union[str, List[str]] = None, exclude_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None) -> vortexasdk.endpoints.reference_breakdown_result.ReferenceBreakdownResult Destination locations breakdown aggregation by geographic area Arguments breakdown_unit_average_basis : Per day metrics only - movement activity on which to base the average metric. Can be one of state properties of a cargo movement: identified_for_loading_state , loading_state , transiting_state , storing_state , ship_to_ship , unloading_state , unloaded_state , oil_on_water_state , unknown_state , or one of time properties of a cargo movement: identified_for_loading_at , loading_start , loading_end , storing_start , storing_end , ship_to_ship_start , ship_to_ship_end , unloading_start , unloading_end . breakdown_unit : Units to aggregate upon. Must be one of the following: 'b' , 't' , 'cbm' , 'bpd' , 'tpd' , 'mpd' . breakdown_geography : Geography hierarchy of the origin to aggregate upon. Must be one of the following: 'terminal' , 'port' , 'country' , 'shipping_region' , 'region' , 'trading_block' , 'trading_region' , 'trading_subregion' , 'sts_zone' , 'waypoint' . breakdown_size : Number of top geographies to return. 
Default is 5. disable_geographic_exclusion_rules : A boolean which specifies whether certain movements should be excluded, based on a combination of their origin and destination. filter_activity : Cargo movement activity on which to base the time filter. The endpoint only includes cargo movements matching that match this filter in the aggregations. Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'storing_state', 'storing_start', 'storing_end', 'transiting_state']. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_vessel_flags : A vessel flag ID, or list of vessel flag IDs to filter on. filter_vessel_ice_class : An ice class ID, or list of ice class IDs to filter on. filter_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to filter on. filter_charterers : An commercial entity ID, or list of commercial entity IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_storage_locations : A geography ID, or list of geography IDs to filter on. filter_waypoints : A geography ID, or list of geography IDs to filter on. filter_ship_to_ship_locations : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. exclude_products : A product ID, or list of product IDs to exclude. exclude_vessel_flags : A vessel flag ID, or list of vessel flag IDs to exclude. exclude_vessel_ice_class : An ice class ID, or list of ice class IDs to exclude. exclude_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to exclude. exclude_destinations : A location ID, or list of location IDs to exclude. exclude_origins : A location ID, or list of location IDs to exclude. exclude_storage_locations : A location ID, or list of location IDs to exclude. exclude_waypoints : A location ID, or list of location IDs to exclude. exclude_ship_to_ship_locations : A location ID, or list of location IDs to exclude. exclude_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to exclude. Returns ReferenceBreakdownResult Example _Breakdown by destination terminal of cargoes departing from the port of origin over the last 5 days, in tonnes. >>> from vortexasdk import DestinationBreakdown, Geographies >>> start = datetime(2019, 11, 10) >>> end = datetime(2019, 11, 15) >>> df = DestinationBreakdown().search( ... 
filter_activity=\"loading_end\", ... breakdown_geography=\"terminal\", ... breakdown_unit=\"t\", ... breakdown_size=5, ... filter_time_min=start, ... filter_time_max=end ... ).to_df() Gives the following: key label value count 0 606e73162cfd0492919ef96b04dae1bfddda09d148d03bafc1dc3eab979a9b0a SPSE - DPF - G.I.E. Petroleum Terminal 785819 12 1 844756c877c680ce0ff582a46b5bb1cf34cc33179df977a609b2c10838d9db5d SK Energy (Ulsan) 288529 11 2 a5269f5a20759b3a120af66a298fa2385a2b81d8b248aec590db73ecd984f8b7 Dongying CNOOC Oil & Petrochemicals Shandong 201283 11 3 78fcabe3bb6a47f2aa019ae9948be43c5ebbe08a2d1cba7b113315c85362cb7c Kandla Oil Terminal 121762 15 4 15db6ca55a3b13d3c4b135afcaf87f5d605680ac75177412af05be37fc3fec38 Pirpau Island 62933 12","title":"Destination Breakdown"},{"location":"endpoints/eia_forecasts/","text":"vortexasdk.endpoints.eia_forecasts Try me out in your browser: EIAForecasts EIAForecasts(self) EIA forecasts Endpoint, use this to search through Vortexa's EIA Forecasts data. The data includes: date : date of the forecast forecast_fri : Vortexa's data science based forecast of the EIA number to be published on the week value : Actual EIA import/export numbers as published by the EIA Weekly Supply Estimates report stocks : EIA stocks (kbl) cover : Cover (days of Supply for the whole of the US, as published by the EIA Weekly Supply Estimates report) runs : refinery runs (refiner \u201cPercent Operable Utilization\u201d as published by the EIA Weekly Supply Estimates report) search EIAForecasts.search(self, preset: str = 'padd1-gasoline-imports', filter_time_min: datetime.datetime = datetime.datetime(2020, 1, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2020, 1, 31, 0, 0)) -> vortexasdk.endpoints.eia_forecasts_result.EIAForecastResult Find EIA forecasts for a given preset and date range. Arguments preset : Use to specify what geography and product information you would like to query. Preset can be : 'padd1-gasoline-imports', 'padd3-gasoline-imports', 'padd5-gasoline-imports', 'us-gasoline-exports', 'padd1-crude-imports', 'padd3-crude-imports', 'padd5-crude-imports', 'us-crude-exports', 'padd1-diesel-imports', 'padd3-diesel-imports', 'padd5-diesel-imports', 'us-diesel-exports', 'padd1-jet-imports', 'padd5-jet-imports', 'us-jet-exports', 'padd1-fueloil-imports', 'padd3-fueloil-imports', 'padd5-fueloil-imports' or 'us-fueloil-exports' filter_time_min : The UTC start date of the time filter filter_time_max : The UTC end date of the time filter Returns List of EIA Forecast object matching selected 'preset'. Examples Find PADD5 gasoline imports EIA forecasts from January 2019. >>> from datetime import datetime >>> from vortexasdk import EIAForecasts >>> df = EIAForecasts().search( ... preset=\"padd5-gasoline-imports\", ... filter_time_min=datetime(2020, 1, 1), ... filter_time_max=datetime(2020, 1, 31) ... ).to_df() returns date forecast_fri value stocks cover runs 2020-01-31T00:00:00.000Z 454.96048964485 323 9541 26.5 65.9 2020-01-24T00:00:00.000Z 545.453497230504 579 10461 25.9 61.5 2020-01-17T00:00:00.000Z 510.289752707662 549 10325 25.2 64.7 2020-01-10T00:00:00.000Z 469.841470826967 2020-01-03T00:00:00.000Z 640.443229654771 Some values can be NULL: value, stocks, cover, runs. 
It can happen when: it's a very recent forecast and Vortexa's data science based forecast (forecast_fri) is available but the complete EIA data isn't yet; or it's an older forecast and the data is not available. vortexasdk.endpoints.eia_forecasts_result EIAForecastResult EIAForecastResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the EIAForecasts endpoint. to_list EIAForecastResult.to_list(self) -> List[vortexasdk.api.eia_forecast.EIAForecast] Represent EIAForecast data as a list. to_df EIAForecastResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent EIA forecasts as a pd.DataFrame . Arguments columns : The EIA forecasts columns we want in the dataframe. Enter columns='all' to include all columns. Defaults to columns = ['date', 'forecast_fri', 'value', 'stocks', 'cover', 'runs'] . Returns pd.DataFrame of EIA forecasts.","title":"EIA Forecasts"},{"location":"endpoints/fixtures/","text":"vortexasdk.endpoints.fixtures Try me out in your browser: Fixtures Fixtures(self) Fixtures Endpoint, use this to search through Vortexa's Fixtures data. A detailed explanation of fixtures can be found here . Limitation Fixtures are available through the UI, API & SDK only by permission from our shipbroker partner. If API access is limited to certain products, you can only pull fixtures data for those products (e.g. with CPP-only access, only CPP fixtures are returned). For an API key to have access to the endpoint, it needs the scopes \"v.r.fix\" and \"v.r.ais\". What conditions produce a Fixture 'fulfilled' status? In terms of the data, we use the laycan and the mapped fixture 'origin'. Historical movements: The start timestamp of the loading event must be within the 5-day laycan window (even if the laycan window is less than 5 days, we expand it to 5), or the laycan must be within the start and end timestamp of the loading event, and the fixture's origin hierarchy must agree with the actual loading polygon's hierarchy. Future movements: The vessel can be in the reported fixture origin within the laycan window, give or take 3 days. We also compare the predicted destination's hierarchy with the fixture's origin hierarchy. An agreement (given that the previous feasibility condition is met) is a sufficient condition to create a movement. When there is disagreement or we don't have a predicted destination, we take into account other factors (e.g. if the destination is a waypoint, we treat them as agreeing). Fixture status Fixture status indicates the point the deal has reached in its evolution: \"Subs\" for vessels on subjects, \"Fxd\" for fixed vessels, sometimes \"Failed\" or \"FLD\" for failed fixtures, sometimes \"RPLC\" for a replacement fixture, \"Conf\" for confirmed and \"Corr\" for corrected. What does the model do in the case of exact duplicates? For historical movements, we don't have a particular logic. For future movements, we apply our own internal sorting procedure. When we have 2 fixtures that are near exact duplicates but with different freight rates or different charterers, how does the model pick? For historical movements, randomly. For future movements, we apply our own internal sorting procedure.
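As a short illustration of working with the fulfilled status described above, one possible sketch is to pull fixtures with the search method documented below and keep only rows flagged as fulfilled; this assumes the vtx_fulfilled column listed under to_df comes back as a boolean, so check the dtype in your own results: >>> from datetime import datetime >>> from vortexasdk import Fixtures >>> df = Fixtures().search( ... filter_time_field='fixing_timestamp', ... filter_time_min=datetime(2020, 1, 1), ... filter_time_max=datetime(2020, 1, 2), ... ).to_df() >>> fulfilled_only = df[df['vtx_fulfilled'] == True]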
search Fixtures.search(self, filter_time_field: str = 'fixing_timestamp', filter_time_min: datetime.datetime = datetime.datetime(2020, 1, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2020, 1, 2, 0, 0), ids: Union[str, List[str]] = None, filter_charterers: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_products: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_flags: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, order: str = None, order_direction: str = None, size: int = None) -> vortexasdk.endpoints.fixtures_result.FixtureResult Find Fixtures matching filters and date range. Arguments filter_time_field : The field that the time range should be filtered against. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. ids : Filter specific fixtures. filter_charterers : A charterer ID, or list of charterer IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_vessel_flags : A vessel flag, or list of vessel flags to filter on. exclude_origins : A geography ID, or list of geography IDs to exclude. exclude_destinations : A geography ID, or list of geography IDs to exclude. exclude_products : A product ID, or list of product IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_charterers : A charterer ID, or list of charterer IDs to exclude. exclude_vessel_flags : A vessel flag, or list of vessel flags to exclude. order : Used to sort the returned results. order_direction : Determines the direction of sorting. size : Used to page results. The size of the result set. Returns List of Fixture objects. Examples Find Fixtures from January 2020. >>> from datetime import datetime >>> from vortexasdk import Fixtures >>> df = Fixtures().search( ... filter_time_field=\"fixing_timestamp\", ... filter_time_min=datetime(2020, 1, 1), ... filter_time_max=datetime(2020, 1, 2), ... ).to_df() returns vessel.name tones origin.label product.label ALPINE EAGLE 454.96048964485 UK Crude
Note: the fixture scope (\"v.r.fix\") is needed to access this endpoint. vortexasdk.endpoints.fixtures_result FixtureResult FixtureResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the Fixtures endpoint. to_list FixtureResult.to_list(self) -> List[vortexasdk.api.fixture.Fixture] Represent Fixtures data as a list. to_df FixtureResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent Fixtures as a pd.DataFrame . Arguments columns : The Fixtures columns we want in the dataframe. Defaults to columns = [ \"id\", 'vessels.corporate_entities.charterer.id', 'vessels.corporate_entities.charterer.label', 'vessels.corporate_entities.charterer.layer', 'vessels.corporate_entities.charterer.probability', 'vessels.corporate_entities.charterer.source', 'vessels.corporate_entities.effective_controller.id', 'vessels.corporate_entities.effective_controller.label', 'vessels.corporate_entities.effective_controller.layer', 'vessels.corporate_entities.effective_controller.probability', 'vessels.corporate_entities.effective_controller.source', 'vessels.corporate_entities.time_charterer.end_timestamp', 'vessels.corporate_entities.time_charterer.id', 'vessels.corporate_entities.time_charterer.label', 'vessels.corporate_entities.time_charterer.layer', 'vessels.corporate_entities.time_charterer.probability', 'vessels.corporate_entities.time_charterer.source', 'vessels.corporate_entities.time_charterer.start_timestamp', 'vessels.cubic_capacity', 'vessels.dwt', 'vessels.end_timestamp', 'vessels.fixture_fulfilled', 'vessels.fixture_id', 'vessels.id', 'vessels.imo', 'vessels.mmsi', 'vessels.name', 'vessels.start_timestamp', 'vessels.status', 'vessels.tags.end_timestamp', 'vessels.tags.start_timestamp', 'vessels.tags.tag', 'vessels.vessel_class', 'vessels.voyage_id', \"laycan_from\", \"laycan_to\", \"tones\", \"fixing_timestamp\", \"fulfilled\", \"vtx_fulfilled\", \"destination.label\", \"destination.id\", \"origin.label\", \"origin.id\", \"product.label\", \"product.id\", \"charterer.label\", \"charterer.id\", ] . A near complete list of columns is given below [ \"id\", \"vessel.id\", \"vessel.name\", \"laycan_from\", \"laycan_to\", \"tones\", \"fixing_timestamp\", \"fulfilled\", \"vtx_fulfilled\", \"destination.label\", \"origin.label\", \"product.label\", \"charterer.label\", ] Returns pd.DataFrame of Fixtures.","title":"Fixtures"},{"location":"endpoints/fixtures/#what-does-the-model-do-in-the-case-of-exact-duplicates","text":"For historical movements, we don't have a particular logic. For future movements, we apply our own internal sorting procedure.","title":"What does the model do in the case of exact duplicates?"},{"location":"endpoints/fixtures/#when-we-have-2-fixtures-that-are-near-exact-duplicates-but-with-different-freight-rates-or-different-charterers-how-does-the-model-pick","text":"For historical movements, randomly. For future movements, we apply our own internal sorting procedure.","title":"When we have 2 fixtures that are near exact duplicates but with different freight rates or different charterers, how does the model pick?"},{"location":"endpoints/freight_pricing_search/","text":"vortexasdk.endpoints.freight_pricing_search Try me out in your browser: FreightPricingSearch FreightPricingSearch(self) Freight Pricing Endpoint, use this to search through Vortexa's Baltic Exchange pricing data.
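As a quick orientation before the full parameter list below, a minimal sketch (assuming the route codes and day filter behave as described under search) that pulls two routes across a few days: >>> from datetime import datetime, timedelta >>> from vortexasdk import FreightPricingSearch >>> days = [datetime(2021, 11, 15) + timedelta(days=i) for i in range(3)] >>> df = FreightPricingSearch().search( ... routes=['TD3C', 'TC2_37'], ... days=days, ... ).to_df(columns=['short_code', 'rate', 'rate_unit'])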
search FreightPricingSearch.search(self, routes: Union[List[str], str] = None, days: List[datetime.datetime] = [], offset: int = None, order: str = None, order_direction: str = None) -> vortexasdk.endpoints.freight_pricing_result.FreightPricingResult List of pricing information applicable for a specified route on a given day. Arguments routes : Used to filter by specific routes. Must be one of the following: Clean routes - TC1 , TC2_37 , TC5 , TC6 , TC7 , TC8 , TC9 , TC10 , TC11 , TC12 , TC14 , TC15 , TC16 , TC17 , TC18 , TC19 . Dirty routes - TD1 , TD2 , TD3C , TD6 , TD7 , TD8 , TD9 , TD12 , TD14 , TD15 , TD17 , TD18 , TD19 , TD20 , TD21 , TD22 , TD23 , TD24 , TD25 , TD26 . BLPG routes - BLPG1 , BLPG2 , BLPG3 . days : Used to filter results by day on which the record was generated. Must be an ISO date array or not supplied. order : Used to sort the returned results. Must be either 'record_date' or not supplied. order_direction : Determines the direction of sorting. \u2018asc\u2019 for ascending, \u2018desc\u2019 for descending. offset : Used to page results. The offset from which records should be returned. size : Used to page results. The size of the result set. Between 0 and 500. Returns FreightPricingResult Example WS rate for the TD3C route generated on 15th Nov 2021. >>> from vortexasdk import FreightPricingSearch >>> from datetime import datetime >>> day = [datetime(2021, 11, 15)] >>> df = FreightPricingSearch().search( ... routes=['TD3C'], ... days=day ... ).to_df(columns=['short_code','rate','rate_unit']) short_code rate rate_unit 0 TD3C 43.32 WS","title":"Search"},{"location":"endpoints/freight_pricing_timeseries/","text":"vortexasdk.endpoints.freight_pricing_timeseries Try me out in your browser: FreightPricingTimeseries FreightPricingTimeseries(self) search FreightPricingTimeseries.search(self, time_min: datetime.datetime = datetime.datetime(2021, 9, 1, 0, 0), time_max: datetime.datetime = datetime.datetime(2021, 11, 1, 0, 0), routes: Union[List[str], str] = None, breakdown_frequency: str = None, breakdown_property: str = None) -> vortexasdk.endpoints.timeseries_result.TimeSeriesResult Time series of the selected pricing information for given routes in the specified time range. Arguments time_min : The UTC start date of the time filter. time_max : The UTC end date of the time filter. breakdown_frequency : Must be one of: 'day' , 'week' , 'doe_week' , 'month' , 'quarter' or 'year' . breakdown_property : Property used to build the value of the aggregation. Must be one of the following: route , cost , tce . routes : Used to filter by specific routes. Must be one of the following: Clean routes - TC1 , TC2_37 , TC5 , TC6 , TC7 , TC8 , TC9 , TC10 , TC11 , TC12 , TC14 , TC15 , TC16 , TC17 , TC18 , TC19 . Dirty routes - TD1 , TD2 , TD3C , TD6 , TD7 , TD8 , TD9 , TD12 , TD14 , TD15 , TD17 , TD18 , TD19 , TD20 , TD21 , TD22 , TD23 , TD24 , TD25 , TD26 . BLPG routes - BLPG1 , BLPG2 , BLPG3 . Returns TimeSeriesResult Example Time series for the WS rate of the TD3C route between 1st and 15th November 2021. >>> from vortexasdk import FreightPricingTimeseries >>> from datetime import datetime >>> start = datetime(2021, 11, 1) >>> end = datetime(2021, 11, 15) >>> df = (FreightPricingTimeseries().search( ... time_min=start, ... time_max=end, ... routes=['TD3C'], ... breakdown_property='rate', ... breakdown_frequency='day') ... 
.to_df()).head(2) Gives the following: key value count 0 2021-11-01 00:00:00+00:00 46.04999923706055 1 1 2021-11-02 00:00:00+00:00 45.13999938964844 1","title":"Time Series"},{"location":"endpoints/geographies/","text":"vortexasdk.endpoints.geographies Try me out in your browser: Geographies Geographies(self) Geographies endpoint. load_all Geographies.load_all(self) -> vortexasdk.endpoints.geographies_result.GeographyResult Load all geographies. search Geographies.search(self, term: Union[str, List[str]] = None, exact_term_match: bool = False, filter_layer: str = None) -> vortexasdk.endpoints.geographies_result.GeographyResult Find all geographies matching given search terms. Arguments term : The geography name (or names) we're filtering on exact_term_match : Search on only exact term matches, or allow similar matches. e.g. When searching for \"China\" with exact_term_match=False , then the SDK will yield geographies named ['China', 'South China', 'China Energy Services Ningbo'...] etc. When exact_term_match=True , the SDK will only yield the geography named China . filter_layer : Must be one of geographical type ['terminal', 'port', 'country', 'shipping_region', 'region', 'trading_block', 'trading_region', 'trading_subregion', 'sts_zone', 'waypoint', 'storage', 'root']. Returns List of geographies matching term Examples Find all geographies with portsmouth in the name. >>> from vortexasdk import Geographies >>> [x.name for x in Geographies().search(term=\"portsmouth\").to_list()] ['Portsmouth [GB]', 'Portsmouth, NH [US]'] Search multiple geography terms >>> df = Geographies().search(term=[\"Liverpool\", \"Southampton\"]).to_df() returns id name layer 0 b63d8f625669fd... Liverpool [GB] ['port'] 1 0cb7d4566de0f2... Southampton [GB] ['port'] 2 8b4273e3181f2d... Liverpool Docks ['terminal'] 3 98c50b0d2ee2b1... Liverpool Bulk Liquids ['terminal'] reference Geographies.reference(self, id: str) -> Dict Perform a geography lookup. Arguments id : Geography ID to lookup Returns Geography matching the ID Further Documentation: VortexaAPI Geography Reference vortexasdk.endpoints.geographies_result GeographyResult GeographyResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the Geography endpoint. to_list GeographyResult.to_list(self) -> List[vortexasdk.api.geography.Geography] Represent geographies as a list. to_df GeographyResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent geographies as a pd.DataFrame . Arguments columns : The geography features we want in the dataframe. Enter columns='all' to include all features. Defaults to columns = ['id', 'name', 'layer'] . 
Returns pd.DataFrame of geographies.","title":"Geographies"},{"location":"endpoints/movement_status_breakdown/","text":"vortexasdk.endpoints.movement_status_breakdown Try me out in your browser: MovementStatusBreakdown MovementStatusBreakdown(self) search MovementStatusBreakdown.search(self, timestamp: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), breakdown_unit_average_basis: str = None, filter_activity: str = 'any_activity', breakdown_unit: str = 'b', disable_geographic_exclusion_rules: bool = None, breakdown_size: int = None, filter_time_min: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2019, 10, 1, 1, 0), filter_products: Union[str, List[str]] = None, filter_charterers: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_vessel_flags: Union[str, List[str]] = None, filter_vessel_ice_class: Union[str, List[str]] = None, filter_vessel_propulsion: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_storage_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_waypoints: Union[str, List[str]] = None, exclude_storage_locations: Union[str, List[str]] = None, exclude_ship_to_ship_locations: Union[str, List[str]] = None, exclude_vessel_ice_class: Union[str, List[str]] = None, exclude_vessel_propulsion: Union[str, List[str]] = None, exclude_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None) -> vortexasdk.endpoints.reference_breakdown_result.ReferenceBreakdownResult Origin locations breakdown aggregation by geographic area Arguments breakdown_unit_average_basis : Per day metrics only - movement activity on which to base the average metric. Can be one of state properties of a cargo movement: identified_for_loading_state , loading_state , transiting_state , storing_state , ship_to_ship , unloading_state , unloaded_state , oil_on_water_state , unknown_state , or one of time properties of a cargo movement: identified_for_loading_at , loading_start , loading_end , storing_start , storing_end , ship_to_ship_start , ship_to_ship_end , unloading_start , unloading_end . breakdown_unit : Units to aggregate upon. Must be one of the following: 'b' , 't' , 'cbm' , 'bpd' , 'tpd' , 'mpd' . timestamp : The UTC date of the specific movement status to search. breakdown_size : Number of top geographies to return. Default is 5. 
disable_geographic_exclusion_rules : A boolean which specifies whether certain movements should be excluded, based on a combination of their origin and destination. filter_activity : Cargo movement activity on which to base the time filter. The endpoint only includes cargo movements matching that match this filter in the aggregations. Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'storing_state', 'storing_start', 'storing_end', 'transiting_state']. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_vessel_flags : A vessel flag ID, or list of vessel flag IDs to filter on. filter_vessel_ice_class : An ice class ID, or list of ice class IDs to filter on. filter_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to filter on. filter_charterers : An commercial entity ID, or list of commercial entity IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_storage_locations : A geography ID, or list of geography IDs to filter on. filter_waypoints : A geography ID, or list of geography IDs to filter on. filter_ship_to_ship_locations : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. exclude_products : A product ID, or list of product IDs to exclude. exclude_vessel_flags : A vessel flag ID, or list of vessel flag IDs to exclude. exclude_vessel_ice_class : An ice class ID, or list of ice class IDs to exclude. exclude_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to exclude. exclude_destinations : A location ID, or list of location IDs to exclude. exclude_origins : A location ID, or list of location IDs to exclude. exclude_storage_locations : A location ID, or list of location IDs to exclude. exclude_waypoints : A location ID, or list of location IDs to exclude. exclude_ship_to_ship_locations : A location ID, or list of location IDs to exclude. exclude_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to exclude. Returns ReferenceBreakdownResult Example Breakdown by movement status of cargoes in tonnes. >>> from vortexasdk import MovementStatusBreakdown >>> start = datetime(2019, 11, 10) >>> timestamp = datetime(2019, 1, 19) >>> end = datetime(2033, 11, 15) >>> df = MovementStatusBreakdown().search( ... filter_activity=\"loading_end\", ... timestamp=timestamp, ... 
breakdown_unit=\"t\", ... breakdown_size=5, ... filter_time_min=start, ... filter_time_max=end ... ).to_df() Gives the following: key value count 0 transiting_state 3009799 24 1 identified_for_loading_state 776599 17 2 loading_state 381359 24 3 unloading_state 238723 21 4 storing_state 118285 18 4 ship_to_ship 118285 18","title":"Movement Status Breakdown"},{"location":"endpoints/onshore_inventories_search/","text":"vortexasdk.endpoints.onshore_inventories_search Try me out in your browser: OnshoreInventoriesSearch OnshoreInventoriesSearch(self) Crude Onshore Inventories Endpoint, use this to search through Vortexa's Onshore Inventory data. Please note: you will require a subscription to our Crude Onshore Inventories module to access this endpoint. search OnshoreInventoriesSearch.search(self, corporate_entity_ids: Union[str, List[str]] = None, crude_confidence: List[str] = None, location_ids: Union[str, List[str]] = None, storage_types: List[str] = None, time_min: datetime.datetime = datetime.datetime(2023, 9, 4, 11, 19, 19, 15269), time_max: datetime.datetime = datetime.datetime(2023, 9, 11, 11, 19, 19, 15278)) -> vortexasdk.endpoints.onshore_inventories_result.OnshoreInventoriesResult List of crude onshore inventories across the globe. Arguments corporate_entity_ids : An array of owner ID(s) to filter on. crude_confidence : An array of confidence metrics to filter on. Possible values are: 'confirmed\u2019 , \u2018probable\u2019 , \u2018unlikely\u2019 location_ids : An array of geography ID(s) to filter on. measurement_ids : An array of unique measurements (each COI measurement) to filter on. size : Used to page results. The size of the result set. Between 0 and 500. storage_types : An array of storage types to filter on. Possible values are: 'refinery' , 'non-refinery' , 'commercial' , 'spr' , 'tbd' ,. time_min : The UTC start date of the time filter. time_max : The UTC end date of the time filter. Returns OnshoreInventoriesResult Data frame example Top 5 Crude Onshore Inventories where 'crude_confidence' status is 'confirmed'. >>> from vortexasdk import OnshoreInventoriesSearch >>> df = OnshoreInventoriesSearch().search(crude_confidence=['confirmed']).to_df().head(5) measurement_id tank_id tank_details.capacity_bbl ... fill_bbl fill_tons fill_cbm 0 1e41bdfc8fa21a1f3d874d41a af83f5475ebd45b9167254667 225055 ... 194898 26648.208642 30986.443224 1 211d96e43ff6893d555f8e7e0 f7c583b26ff8d4e50d4ba9da5 658327 ... 131804 18021.429116 20955.254352 2 5ef5595cadf0161f6b59a0769 7047360864070b7a08802ae82 209196 ... 468790 64097.187910 74531.984520 3 b70f105d6309fb1acdb4b18c5 2ae82a3b79f32105716725460 664169 ... 105934 14484.249886 16842.234792 4 72841f54183a082de91d9er43 802ae82a3b79f321167254667 75669 ... 474814 64920.843406 75489.728232 List example First COI object in the list where 'crude_confidence' status is 'confirmed'. 
>>> from vortexasdk import OnshoreInventoriesSearch >>> list = OnshoreInventoriesSearch().search(crude_confidence='confirmed').to_list()[0] { measurement_id: '5731385e7b0ce8', tank_id: 'c5a6bf5e95e969cf7', tank_details: { id: 'c5a6bf5e95e969cf7', capacity_bbl: 875573, capacity_cbm: 139205, capacity_ton: 119716, corporate_entity_details: { id: 'b25523ae823b9e38bb11a161eb60d42194f1a886e58dfe39592dcc324f06f60e', label: 'Repsol' }, corporate_entity_id: 'b25523ae823b9e38bb11a161eb60d42194f1a886e58dfe39592dcc324f06f60e', crude_confidence: 'confirmed', last_updated: '2021-08-03T14: 34: 18.533Z', leaf: True, location_id: 'a98c21d06633d86c8c55', location_details: [ { id: 'a98c21d06633d86c8c55', label: 'CartagenaLNGTerminal(Enagas)', layer: 'terminal' }, { id: 'c7baa1cfb2a11e7c2eca', label: 'Cartagena[ ES ]', layer: 'port' } ], name: 'CGA030', pos: (-0.926539, 37.574), radius: 45, ref_type: 'asset_tank', storage_terminal_id: 'e757382d4aa5a8aa77d0f11ac7f535fb32993bae89bdf581771f155d1c0149b8', storage_terminal_name: 'RepsolCartagenaRefinery', storage_type: 'refinery' }, measurement_timestamp: '2021-09-06T17: 50: 12', publish_timestamp: '2021-09-08T13: 59: 45', report_timestamp: '2021-09-09T14: 00: 00', carry_forward: False, fill_bbl: 732345, fill_tons: 100132.79950499999, fill_cbm: 116434.06685999999, reference_data_version: '20210906-1631611377217' }","title":"Search"},{"location":"endpoints/onshore_inventories_timeseries/","text":"vortexasdk.endpoints.onshore_inventories_timeseries Try me out in your browser: OnshoreInventoriesTimeseries OnshoreInventoriesTimeseries(self) Please note: you will require a subscription to our Crude Onshore Inventories module to access this endpoint. search OnshoreInventoriesTimeseries.search(self, corporate_entity_ids: Union[str, List[str]] = None, crude_confidence: List[str] = None, location_ids: Union[str, List[str]] = None, storage_types: List[str] = None, asset_tank_ids: Union[str, List[str]] = None, time_max: datetime.datetime = None, time_min: datetime.datetime = None, timeseries_frequency: str = None, timeseries_split_property: str = None, timeseries_unit: str = None, timeseries_unit_operator: str = None, exclude_corporate_entity_ids: List[str] = None, exclude_crude_confidence: List[str] = None, exclude_location_ids: Union[str, List[str]] = None, exclude_storage_types: List[str] = None) -> vortexasdk.endpoints.breakdown_result.BreakdownResult Sum of crude onshore inventories storage and total capacity updated weekly. For frequencies other than 'week', the values returned are calculated by returning the final weekly onshore inventories 'quantity' bucket for the specified period. Arguments corporate_entity_ids : An array of owner ID(s) to filter on. crude_confidence : An array of confidence metrics to filter on. Possible values are: 'confirmed\u2019 , \u2018probable\u2019 , \u2018unlikely\u2019 location_ids : An array of geography ID(s) to filter on. storage_types : An array of storage types to filter on. Possible values are: 'refinery' , 'non-refinery' , 'commercial' , 'spr' , 'tbd' asset_tank_ids : An array of unique Asset Tanks ID(s) to filter on - linked to the Asset Tank Reference data. time_min : The UTC start date of the time filter. time_max : The UTC end date of the time filter. timeseries_frequency : Frequency denoting the granularity of the time series. Must be one of the following: 'week' , 'month' , 'year' . timeseries_split_property : Property used to breakdown the aggregation. 
By default 'quantity' is used which returns only the total, but aggregations can be broken down by either 'crude_confidence' , 'storage_type' , 'location_country' , 'location_port' , 'location_region' , 'location_shipping_region' , 'location_trading_region' , 'location_trading_subregion' timeseries_unit : A numeric metric to be calculated for each time bucket. Must be either 'b' , 't' , 'cbm' corresponding to barrels, metric tonnes, cubic meters. timeseries_unit_operator : Argument must be either 'fill' (total in storage) or 'capacity' (total capacity). exclude_corporate_entity_ids : An array of owner ID(s) to exclude from the results, exclude_crude_confidence : An array of confidence metrics to exclude from the results exclude_location_ids : An array of geography ID(s) to exclude from the results exclude_storage_types : An array of storage types to exclude from the results Returns BreakdownResult Example Total storage capacity across Europe for the first week of January 2021. >>> from vortexasdk import OnshoreInventoriesTimeseries >>> from datetime import datetime >>> search_result = OnshoreInventoriesTimeseries().search( ... location_ids=[\"f39d455f5d38907394d6da3a91da4e391f9a34bd6a17e826d6042761067e88f4\"], ... time_min=datetime(2021, 1, 5), ... time_max=datetime(2021, 1, 12), ... timeseries_frequency=\"week\", ... timeseries_split_property=\"location_country\", ... timeseries_unit=\"b\", ... timeseries_unit_operator=\"capacity\", ... ).to_list() Gives the following result: [ BreakdownItem(key='2021-09-09T14:00:00.000Z', count=3769, value=994621677, breakdown=[ { 'id': 'ee1de4914cc26e8f1326b49793b089131870d478714c07e0c99c56cb307704c5', 'label': 'Italy', 'value': 204482432, 'count': 762 }, { 'id': '2aaad41b89dfad19e5668918018ae02695d7710bcbe5f2dc689234e8da492de3', 'label': 'UnitedKingdom', 'value': 113001186, 'count': 415 }, { 'id': '284c8d9831e1ac59c0cb714468856d561af722c8a2432c13a001f909b97e6b71', 'label': 'Germany', 'value': 93583672, 'count': 405 }, { 'id': 'e9e556620469f46a4dc171aef71073f5286a288da35c5883cac760446b0ceb46', 'label': 'France', 'value': 86652291, 'count': 327 }, ... 
]) ]","title":"Time Series"},{"location":"endpoints/origin_breakdown/","text":"vortexasdk.endpoints.origin_breakdown Try me out in your browser: OriginBreakdown OriginBreakdown(self) search OriginBreakdown.search(self, breakdown_geography: str = 'country', breakdown_unit_average_basis: str = None, filter_activity: str = 'any_activity', breakdown_unit: str = 'b', disable_geographic_exclusion_rules: bool = None, breakdown_size: int = None, filter_time_min: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2019, 10, 1, 1, 0), filter_products: Union[str, List[str]] = None, filter_charterers: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_vessel_flags: Union[str, List[str]] = None, filter_vessel_ice_class: Union[str, List[str]] = None, filter_vessel_propulsion: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_storage_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_waypoints: Union[str, List[str]] = None, exclude_storage_locations: Union[str, List[str]] = None, exclude_ship_to_ship_locations: Union[str, List[str]] = None, exclude_vessel_ice_class: Union[str, List[str]] = None, exclude_vessel_propulsion: Union[str, List[str]] = None, exclude_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None) -> vortexasdk.endpoints.reference_breakdown_result.ReferenceBreakdownResult Origin locations breakdown aggregation by geographic area Arguments breakdown_unit_average_basis : Per day metrics only - movement activity on which to base the average metric. Can be one of state properties of a cargo movement: identified_for_loading_state , loading_state , transiting_state , storing_state , ship_to_ship , unloading_state , unloaded_state , oil_on_water_state , unknown_state , or one of time properties of a cargo movement: identified_for_loading_at , loading_start , loading_end , storing_start , storing_end , ship_to_ship_start , ship_to_ship_end , unloading_start , unloading_end . breakdown_unit : Units to aggregate upon. Must be one of the following: 'b' , 't' , 'cbm' , 'bpd' , 'tpd' , 'mpd' . breakdown_geography : Geography hierarchy of the origin to aggregate upon. Must be one of the following: 'terminal' , 'port' , 'country' , 'shipping_region' , 'region' , 'trading_block' , 'trading_region' , 'trading_subregion' , 'sts_zone' , 'waypoint' . breakdown_size : Number of top geographies to return. Default is 5. 
disable_geographic_exclusion_rules : A boolean which specifies whether certain movements should be excluded, based on a combination of their origin and destination. filter_activity : Cargo movement activity on which to base the time filter. The endpoint only includes cargo movements that match this filter in the aggregations. Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'storing_state', 'storing_start', 'storing_end', 'transiting_state']. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_vessel_flags : A vessel flag ID, or list of vessel flag IDs to filter on. filter_vessel_ice_class : An ice class ID, or list of ice class IDs to filter on. filter_vessel_propulsion : A propulsion means ID, or list of propulsion means IDs to filter on. filter_charterers : A commercial entity ID, or list of commercial entity IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_storage_locations : A geography ID, or list of geography IDs to filter on. filter_waypoints : A geography ID, or list of geography IDs to filter on. filter_ship_to_ship_locations : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. exclude_products : A product ID, or list of product IDs to exclude. exclude_vessel_flags : A vessel flag ID, or list of vessel flag IDs to exclude. exclude_vessel_ice_class : An ice class ID, or list of ice class IDs to exclude. exclude_vessel_propulsion : A propulsion means ID, or list of propulsion means IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to exclude. exclude_destinations : A location ID, or list of location IDs to exclude. exclude_origins : A location ID, or list of location IDs to exclude. exclude_storage_locations : A location ID, or list of location IDs to exclude. exclude_waypoints : A location ID, or list of location IDs to exclude. exclude_ship_to_ship_locations : A location ID, or list of location IDs to exclude. exclude_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to exclude. Returns ReferenceBreakdownResult Example Breakdown by origin terminal of cargoes departing from the port of origin over the last 5 days, in tonnes. >>> from vortexasdk import OriginBreakdown, Geographies >>> from datetime import datetime >>> start = datetime(2019, 11, 10) >>> end = datetime(2019, 11, 15) >>> df = OriginBreakdown().search( ... filter_activity=\"loading_end\", ... 
breakdown_geography=\"terminal\", ... breakdown_unit=\"t\", ... breakdown_size=5, ... filter_time_min=start, ... filter_time_max=end ... ).to_df() Gives the following: key label value count 0 c3daea3cc9c5b3bd91c90882d42c2a418c4cf17b90ff12da3ac78444282a238a Juaymah Crude Oil Terminal 3009799 24 1 3a39cf841ece0c7cb879f72af01cb634191142e0de8010d5ef877fd66c2e8605 Houston Enterprise Terminal 776599 17 2 345b7661310bc82a04e0a4edffd02c286c410c023b53edfb90ed3386640c0476 Arzew GL1Z/GL2Z LNG Terminal 381359 24 3 9dfa3be1b42d1f5e80361b6f442b5217b486876ad0c25e382055887c9e231ad2 SabTank (PCQ-1) Al Jubail 238723 21 4 4813dd7209e85b128cc2fbc7c08fef08d26259550210f28a5c7ff3ccd7b2ba61 Mailiao Industrial Park-Formosa Plastics 118285 18","title":"Origin Breakdown"},{"location":"endpoints/parent_product_breakdown/","text":"vortexasdk.endpoints.parent_product_breakdown Try me out in your browser: ParentProductBreakdown ParentProductBreakdown(self) search ParentProductBreakdown.search(self, breakdown_unit_average_basis: str = None, filter_activity: str = 'any_activity', breakdown_unit: str = 'b', disable_geographic_exclusion_rules: bool = None, breakdown_size: int = None, filter_time_min: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2019, 10, 1, 1, 0), filter_products: Union[str, List[str]] = None, filter_charterers: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_vessel_flags: Union[str, List[str]] = None, filter_vessel_ice_class: Union[str, List[str]] = None, filter_vessel_propulsion: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_storage_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_waypoints: Union[str, List[str]] = None, exclude_storage_locations: Union[str, List[str]] = None, exclude_ship_to_ship_locations: Union[str, List[str]] = None, exclude_vessel_ice_class: Union[str, List[str]] = None, exclude_vessel_propulsion: Union[str, List[str]] = None, exclude_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None) -> vortexasdk.endpoints.reference_breakdown_result.ReferenceBreakdownResult Origin locations breakdown aggregation by geographic area Arguments breakdown_unit_average_basis : Per day metrics only - movement activity on which to base the average metric. 
Can be one of state properties of a cargo movement: identified_for_loading_state , loading_state , transiting_state , storing_state , ship_to_ship , unloading_state , unloaded_state , oil_on_water_state , unknown_state , or one of time properties of a cargo movement: identified_for_loading_at , loading_start , loading_end , storing_start , storing_end , ship_to_ship_start , ship_to_ship_end , unloading_start , unloading_end . breakdown_unit : Units to aggregate upon. Must be one of the following: 'b' , 't' , 'cbm' , 'bpd' , 'tpd' , 'mpd' . breakdown_size : Number of top geographies to return. Default is 5. disable_geographic_exclusion_rules : A boolean which specifies whether certain movements should be excluded, based on a combination of their origin and destination. filter_activity : Cargo movement activity on which to base the time filter. The endpoint only includes cargo movements matching that match this filter in the aggregations. Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'storing_state', 'storing_start', 'storing_end', 'transiting_state']. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_vessel_flags : A vessel flag ID, or list of vessel flag IDs to filter on. filter_vessel_ice_class : An ice class ID, or list of ice class IDs to filter on. filter_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to filter on. filter_charterers : An commercial entity ID, or list of commercial entity IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_storage_locations : A geography ID, or list of geography IDs to filter on. filter_waypoints : A geography ID, or list of geography IDs to filter on. filter_ship_to_ship_locations : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. exclude_products : A product ID, or list of product IDs to exclude. exclude_vessel_flags : A vessel flag ID, or list of vessel flag IDs to exclude. exclude_vessel_ice_class : An ice class ID, or list of ice class IDs to exclude. exclude_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to exclude. exclude_destinations : A location ID, or list of location IDs to exclude. exclude_origins : A location ID, or list of location IDs to exclude. 
exclude_storage_locations : A location ID, or list of location IDs to exclude. exclude_waypoints : A location ID, or list of location IDs to exclude. exclude_ship_to_ship_locations : A location ID, or list of location IDs to exclude. exclude_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to exclude. Returns ReferenceBreakdownResult Example Breakdown by product of the cargoes in tonnes. >>> from vortexasdk import ParentProductBreakdown >>> start = datetime(2019, 11, 10) >>> end = datetime(2019, 11, 15) >>> df = ParentProductBreakdown().search( ... filter_activity=\"loading_end\", ... breakdown_unit=\"t\", ... breakdown_size=5, ... filter_time_min=start, ... filter_time_max=end ... ).to_df() Gives the following: key label value count 0 c3daea3cc9c5b3bd91c90882d42c2a418c4cf17b90ff12da3ac78444282a238a Juaymah Crude Oil Terminal 3009799 24 1 3a39cf841ece0c7cb879f72af01cb634191142e0de8010d5ef877fd66c2e8605 Houston Enterprise Terminal 776599 17 2 345b7661310bc82a04e0a4edffd02c286c410c023b53edfb90ed3386640c0476 Arzew GL1Z/GL2Z LNG Terminal 381359 24 3 9dfa3be1b42d1f5e80361b6f442b5217b486876ad0c25e382055887c9e231ad2 SabTank (PCQ-1) Al Jubail 238723 21 4 4813dd7209e85b128cc2fbc7c08fef08d26259550210f28a5c7ff3ccd7b2ba61 Mailiao Industrial Park-Formosa Plastics 118285 18","title":"Parent Product Breakdown"},{"location":"endpoints/product_breakdown/","text":"vortexasdk.endpoints.product_breakdown Try me out in your browser: ProductBreakdown ProductBreakdown(self) search ProductBreakdown.search(self, breakdown_product: str = 'group', breakdown_unit_average_basis: str = None, filter_activity: str = 'any_activity', breakdown_unit: str = 'b', disable_geographic_exclusion_rules: bool = None, breakdown_size: int = None, filter_time_min: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2019, 10, 1, 1, 0), filter_products: Union[str, List[str]] = None, filter_charterers: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_vessel_flags: Union[str, List[str]] = None, filter_vessel_ice_class: Union[str, List[str]] = None, filter_vessel_propulsion: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_storage_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_waypoints: Union[str, List[str]] = None, exclude_storage_locations: Union[str, List[str]] = None, exclude_ship_to_ship_locations: Union[str, List[str]] = None, exclude_vessel_ice_class: Union[str, List[str]] = None, 
exclude_vessel_propulsion: Union[str, List[str]] = None, exclude_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None) -> vortexasdk.endpoints.reference_breakdown_result.ReferenceBreakdownResult Origin locations breakdown aggregation by geographic area Arguments breakdown_unit_average_basis : Per day metrics only - movement activity on which to base the average metric. Can be one of state properties of a cargo movement: identified_for_loading_state , loading_state , transiting_state , storing_state , ship_to_ship , unloading_state , unloaded_state , oil_on_water_state , unknown_state , or one of time properties of a cargo movement: identified_for_loading_at , loading_start , loading_end , storing_start , storing_end , ship_to_ship_start , ship_to_ship_end , unloading_start , unloading_end . breakdown_unit : Units to aggregate upon. Must be one of the following: 'b' , 't' , 'cbm' , 'bpd' , 'tpd' , 'mpd' . breakdown_product : This represents the different levels of the product hierarchy. With 'group' being the top level, and grade being the lowest - or most accurate. 'group', 'group_product', 'category', 'grade' breakdown_size : Number of top geographies to return. Default is 5. disable_geographic_exclusion_rules : A boolean which specifies whether certain movements should be excluded, based on a combination of their origin and destination. filter_activity : Cargo movement activity on which to base the time filter. The endpoint only includes cargo movements matching that match this filter in the aggregations. Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'storing_state', 'storing_start', 'storing_end', 'transiting_state']. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_vessel_flags : A vessel flag ID, or list of vessel flag IDs to filter on. filter_vessel_ice_class : An ice class ID, or list of ice class IDs to filter on. filter_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to filter on. filter_charterers : An commercial entity ID, or list of commercial entity IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_storage_locations : A geography ID, or list of geography IDs to filter on. filter_waypoints : A geography ID, or list of geography IDs to filter on. filter_ship_to_ship_locations : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. exclude_products : A product ID, or list of product IDs to exclude. exclude_vessel_flags : A vessel flag ID, or list of vessel flag IDs to exclude. exclude_vessel_ice_class : An ice class ID, or list of ice class IDs to exclude. 
exclude_vessel_propulsion : A propulsion means ID, or list of propulsion means IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to exclude. exclude_destinations : A location ID, or list of location IDs to exclude. exclude_origins : A location ID, or list of location IDs to exclude. exclude_storage_locations : A location ID, or list of location IDs to exclude. exclude_waypoints : A location ID, or list of location IDs to exclude. exclude_ship_to_ship_locations : A location ID, or list of location IDs to exclude. exclude_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to exclude. Returns ReferenceBreakdownResult Example Breakdown by product of the cargoes in tonnes. >>> from vortexasdk import ProductBreakdown >>> from datetime import datetime >>> start = datetime(2019, 11, 10) >>> end = datetime(2019, 11, 15) >>> df = ProductBreakdown().search( ... filter_activity=\"loading_end\", ... breakdown_product=\"group\", ... breakdown_unit=\"t\", ... breakdown_size=5, ... filter_time_min=start, ... filter_time_max=end ... ).to_df() Gives the following: key label value count 0 c3daea3cc9c5b3bd91c90882d42c2a418c4cf17b90ff12da3ac78444282a238a Juaymah Crude Oil Terminal 3009799 24 1 3a39cf841ece0c7cb879f72af01cb634191142e0de8010d5ef877fd66c2e8605 Houston Enterprise Terminal 776599 17 2 345b7661310bc82a04e0a4edffd02c286c410c023b53edfb90ed3386640c0476 Arzew GL1Z/GL2Z LNG Terminal 381359 24 3 9dfa3be1b42d1f5e80361b6f442b5217b486876ad0c25e382055887c9e231ad2 SabTank (PCQ-1) Al Jubail 238723 21 4 4813dd7209e85b128cc2fbc7c08fef08d26259550210f28a5c7ff3ccd7b2ba61 Mailiao Industrial Park-Formosa Plastics 118285 18","title":"Product Breakdown"},{"location":"endpoints/products/","text":"vortexasdk.endpoints.products Try me out in your browser: Products Products(self) Products endpoint. load_all Products.load_all(self) -> vortexasdk.endpoints.products_result.ProductResult Load all products. search Products.search(self, term: Union[str, List[str]] = None, ids: Union[str, List[str]] = None, product_parent: Union[str, List[str]] = None, exact_term_match: bool = False, filter_layer: str = None) -> vortexasdk.endpoints.products_result.ProductResult Find all products matching given search terms. Arguments term : The name(s) (or partial name(s)) of a product we'd like to search ids : ID or IDs of products we'd like to search product_parent : ID, or list of IDs of the immediate product parent. E.g. product_parent ='12345' will return all children of product 12345 . exact_term_match : By default, the SDK returns all products whose name includes the search term. For example, searching for \"Gasoil\" will return results including \"Gasoil\", \"Gasoil 0.4pc\", \"Gasoil 500ppm\" etc. Setting exact_term_match to True ensures that only exact term matches are returned, i.e. just \"Gasoil\" in this case. filter_layer : Must be one of product types ['group', 'group_product', 'category', 'grade']. Returns List of products matching the search arguments.
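Examples As a brief hedged sketch (the search terms are illustrative only), the filter_layer and exact_term_match arguments described above can be combined with a term search:
>>> from vortexasdk import Products
>>> grades = Products().search(term='gasoil', filter_layer='grade').to_df()  # 'grade'-level products only
>>> exact = Products().search(term='Gasoil', exact_term_match=True).to_list()  # exact name matches only
Let's look for products with one of ['diesel', 'fuel oil', 'grane'] in their name, or related names.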
>>> from vortexasdk import Products >>> df = Products().search(term=['diesel', 'fuel oil', 'grane']).to_df('all') Returns id name layer.0 leaf parent.0.name parent.0.layer.0 parent.0.id meta.api_min meta.api_max ref_type meta.sulphur_min meta.sulphur_max 0 1c107b4317bc2c8... Fuel Oil category False Dirty products product 5de0b00094e0fd... 12.8878 12.8878 product nan nan 1 fddedd17e02507f... Grane grade True Medium-Sour subproduct_group a7e26956fbb917... 29.2955 29.2955 product 0.62 0.62 2 deda35eb9ca56b5... Diesel/Gasoil category False Clean products product b68cbb7746f8b9... 35.9556 35.9556 product nan nan Further Documentation VortexaAPI Product Reference reference Products.reference(self, id: str) -> Dict Perform a product lookup. Arguments id : Product ID to lookup Returns Product record matching the ID Further Documentation: VortexaAPI Product Reference vortexasdk.endpoints.products_result ProductResult ProductResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the Product endpoint. to_list ProductResult.to_list(self) -> List[vortexasdk.api.product.Product] Represent products as a list. to_df ProductResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent products as a pd.DataFrame . Arguments columns : The product features we want in the dataframe. Enter columns='all' to include all features. Defaults to columns = ['id', 'name', 'layer.0', 'parent.0.name'] . Returns pd.DataFrame of products.","title":"Products"},{"location":"endpoints/storage_terminals/","text":"vortexasdk.endpoints.storage_terminals Try me out in your browser: StorageTerminals StorageTerminals(self) Storage Terminals endpoint. A Storage Terminal is a reference value that corresponds to an ID associated with other entities. For example, a storage terminal object may have the following keys: { \"name\": \"Military Oil Depot\", \"parent\": { \"name\": \"Bandar Khomeini, Bandar Mahshahr [IR]\" } ... } These IDs represent storage terminals which can be found via the Storage Terminal reference endpoint. When the storage terminals endpoint is searched with those ids as parameters: >>> from vortexasdk import StorageTerminals >>> df = StorageTerminals().search(ids=[\"08bbaf7a67ab30036d73b9604b932352a73905e16b8342b27f02ae34941b7db5\"]).to_df() Returns id name lat lon 0 08bbaf7a67ab30036d73... Military Oil Depot 90 180 load_all StorageTerminals.load_all(self) -> vortexasdk.endpoints.storage_terminals_result.StorageTerminalResult Load all storage terminals. search StorageTerminals.search(self, ids: Union[str, List[str]] = None, term: Union[str, List[str]] = None) -> vortexasdk.endpoints.storage_terminals_result.StorageTerminalResult Find all storage terminals matching given term. Arguments ids : List of storage terminal ids to filter by. term : List of terms to filter on. Returns List of storage terminals matching the ids or term specified. Examples Find a storage terminal by term, for example the name of the storage terminal. >>> from vortexasdk import StorageTerminals >>> df = StorageTerminals().search(term=[\"Military\"]).to_df() Returns id name lat lon 0 08bbaf7a67ab30036d73... Military Oil Depot 90 180 vortexasdk.endpoints.storage_terminals_result StorageTerminalResult StorageTerminalResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the Storage Terminals endpoint. 
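As a minimal illustrative sketch (not part of the original reference; the search term reuses the example above), the container can be consumed either as typed objects or as a DataFrame via the to_list and to_df helpers documented below:
>>> from vortexasdk import StorageTerminals
>>> result = StorageTerminals().search(term=['Military'])
>>> terminals = result.to_list()  # typed StorageTerminal objects
>>> names = [t.name for t in terminals]  # the name attribute mirrors the default 'name' column
>>> df = result.to_df(columns=['id', 'name'])  # or columns='all' for every feature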
to_list StorageTerminalResult.to_list(self) -> List[vortexasdk.api.storage_terminal.StorageTerminal] Represent storage terminals as a list. to_df StorageTerminalResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent storage terminals as a pd.DataFrame . Arguments columns : The storage terminals features we want in the dataframe. Enter columns='all' to include all features. Defaults to columns = ['id', 'name', 'lat', 'lon'] . Returns pd.DataFrame of storage terminals.","title":"Storage Terminals"},{"location":"endpoints/vessel_availability_breakdown/","text":"vortexasdk.endpoints.vessel_availability_breakdown Try me out in your browser: VesselAvailabilityBreakdown VesselAvailabilityBreakdown(self) Please note: you will require a subscription to our Freight module to access this endpoint. search VesselAvailabilityBreakdown.search(self, filter_products: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_vessel_status: str = None, filter_vessel_location: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_destination: Union[str, List[str]] = None, filter_region: str = None, filter_port: str = None, use_reference_port: bool = False, filter_days_to_arrival: List[Dict[str, int]] = None, filter_vessel_dwt_min: int = None, filter_vessel_dwt_max: int = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_idle_min: int = None, filter_vessel_idle_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_recent_visits: str = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_vessel_status: str = None, exclude_vessel_location: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_destination: Union[str, List[str]] = None) -> vortexasdk.endpoints.timeseries_result.TimeSeriesResult Number and DWT of all vessels that can be available to load a given cargo at a given port, grouped by the number of days to arrival. Arguments filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_destination : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_status : The vessel status on which to base the filter. Enter 'vessel_status_ballast' for ballast vessels, 'vessel_status_laden_known' for laden vessels with known cargo (i.e. a type of cargo that Vortexa currently tracks) or 'any_activity' for any other vessels filter_vessel_location : A location ID, or list of location IDs to filter on. filter_port : Filter by port ID. filter_region : Filter by region ID - takes precedence over filter_port if provided. This should be used in conjunction with use_reference_port filter_days_to_arrival : Filter availability by time to arrival in days` use_reference_port : If this flag is enabled, we will return data for the reference port instead of the user selected one, filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). 
filter_vessel_idle_min : A number greater than 0 (representing idle days). filter_vessel_idle_max : A number greater than 0 and filter_vessel_idle_min (representing idle days). filter_vessel_dwt_min : A number between 0 and 550000. filter_vessel_dwt_max : A number between 0 and 550000. filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_recent_visits : Filter availability by each vessel's recent visits exclude_products : A product ID, or list of product IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_vessel_status : The vessel status on which to base the filter. Enter 'vessel_status_ballast' for ballast vessels, 'vessel_status_laden_known' for laden vessels with known cargo (i.e. a type of cargo that Vortexa currently tracks) or 'any_activity' for any other vessels exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to filter on. exclude_destination : A location ID, or list of location IDs to filter on. Returns TimeSeriesResult Example Breakdown of number and DWT of all vessels arriving at Rotterdam in the next 5 days. >>> from vortexasdk import VesselAvailabilityBreakdown, Geographies >>> rotterdam = [g.id for g in Geographies().search(\"rotterdam\").to_list() if \"port\" in g.layer] >>> df = VesselAvailabilityBreakdown().search( ... filter_port=rotterdam[0], ... filter_days_to_arrival={\"min\": 0, \"max\": 5} ... ).to_df() Gives the following: key value count 0 2021-06-23 00:00:00+00:00 2939754 34 1 2021-06-24 00:00:00+00:00 2676732 38 2 2021-06-25 00:00:00+00:00 6262914 74 3 2021-06-26 00:00:00+00:00 3445105 43 4 2021-06-27 00:00:00+00:00 3924460 51","title":"Breakdown"},{"location":"endpoints/vessel_availability_search/","text":"vortexasdk.endpoints.vessel_availability_search Try me out in your browser: VesselAvailabilitySearch VesselAvailabilitySearch(self) Vessel Availability Endpoint, use this to search through Vortexa's vessel availability data. Please note: you will require a subscription to our Freight module to access this endpoint. 
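Ahead of the full signature below, a hedged sketch of how the ordering and paging arguments can be combined; the port ID is the Rotterdam ID reused from the example further down, and all other values are illustrative:
>>> from vortexasdk import VesselAvailabilitySearch
>>> rotterdam = '68faf65af1345067f11dc6723b8da32f00e304a6f33c000118fccd81947deb4e'
>>> df = VesselAvailabilitySearch().search(
...     filter_port=rotterdam,
...     filter_days_to_arrival={'min': 1, 'max': 5},
...     order='days_to_arrival',
...     order_direction='asc',
...     offset=0
... ).to_df().head(10)  # first page of results, soonest arrivals first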
search VesselAvailabilitySearch.search(self, filter_region: str = None, filter_port: str = None, use_reference_port: bool = None, filter_products: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_vessel_status: str = None, filter_vessel_location: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_destination: Union[str, List[str]] = None, filter_days_to_arrival: List[Dict[str, int]] = None, filter_vessel_dwt_min: int = None, filter_vessel_dwt_max: int = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_idle_min: int = None, filter_vessel_idle_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_recent_visits: str = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_vessel_status: str = None, exclude_vessel_location: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_destination: Union[str, List[str]] = None, offset: int = None, order: str = None, order_direction: str = None) -> vortexasdk.endpoints.vessel_availability_result.VesselAvailabilityResult List of vessels that can be available to load a given cargo at a given port on a future date. Arguments order : Used to sort the returned results. Must be one of the following: [\u2018vessel_status\u2019, \u2018days_to_arrival\u2019, \u2018days_idle\u2019]. order_direction : Determines the direction of sorting. \u2018asc\u2019 for ascending, \u2018desc\u2019 for descending. offset : Used to page results. The offset from which records should be returned. size : Used to page results. The size of the result set. Between 0 and 500. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_destination : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_status : The vessel status on which to base the filter. Enter 'vessel_status_ballast' for ballast vessels, 'vessel_status_laden_known' for laden vessels with known cargo (i.e. a type of cargo that Vortexa currently tracks) or 'any_activity' for any other vessels filter_vessel_location : A location ID, or list of location IDs to filter on. filter_port : Filter by port ID. filter_region : Filter by region ID - takes precedence over filter_port if provided. This should be used in conjunction with use_reference_port filter_days_to_arrival : Filter availability by time to arrival in days` use_reference_port : If this flag is enabled, we will return data for the reference port instead of the user selected one, filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_idle_min : A number greater than 0 (representing idle days). filter_vessel_idle_max : A number greater than 0 and filter_vessel_idle_min (representing idle days). filter_vessel_dwt_min : A number between 0 and 550000. filter_vessel_dwt_max : A number between 0 and 550000. 
filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_recent_visits : Filter availability by each vessel's recent visits exclude_products : A product ID, or list of product IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_vessel_status : The vessel status on which to base the filter. Enter 'vessel_status_ballast' for ballast vessels, 'vessel_status_laden_known' for laden vessels with known cargo (i.e. a type of cargo that Vortexa currently tracks) or 'any_activity' for any other vessels exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to filter on. exclude_destination : A location ID, or list of location IDs to filter on. Returns VesselAvailabilityResult Example Top 2 available vessels arriving at Rotterdam port in the next 5 days. >>> from vortexasdk import VesselAvailabilitySearch, Geographies >>> rotterdam = \"68faf65af1345067f11dc6723b8da32f00e304a6f33c000118fccd81947deb4e\" >>> df = VesselAvailabilitySearch().search( ... filter_port=rotterdam, ... filter_days_to_arrival={\"min\": 1, \"max\": 5} ... ).to_df(columns=['available_at','vessel_name','vessel_class']).head(2) available_at vessel_name vessel_class 0 2017-09-30 15:30:27+00:00 STAR RIVER handysize 1 2017-08-29 14:51:32+00:00 AMALTHEA aframax","title":"Search"},{"location":"endpoints/vessel_availability_timeseries/","text":"vortexasdk.endpoints.vessel_availability_timeseries Try me out in your browser: VesselAvailabilityTimeseries VesselAvailabilityTimeseries(self) Please note: you will require a subscription to our Freight module to access this endpoint. search VesselAvailabilityTimeseries.search(self, filter_time_min: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2019, 10, 1, 1, 0), filter_products: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_vessel_status: str = None, filter_vessel_location: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_destination: Union[str, List[str]] = None, filter_region: str = None, filter_port: str = None, use_reference_port: bool = False, filter_days_to_arrival: List[Dict[str, int]] = None, filter_vessel_dwt_min: int = None, filter_vessel_dwt_max: int = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_idle_min: int = None, filter_vessel_idle_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_recent_visits: str = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_vessel_status: str = None, exclude_vessel_location: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_destination: Union[str, List[str]] = None) -> vortexasdk.endpoints.timeseries_result.TimeSeriesResult Time series of the number of vessels that can be available to load a given cargo at a given port for every day in the specified range. Arguments filter_time_min : The UTC start date of the time filter. 
filter_time_max : The UTC end date of the time filter. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_destination : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_status : The vessel status on which to base the filter. Enter 'vessel_status_ballast' for ballast vessels, 'vessel_status_laden_known' for laden vessels with known cargo (i.e. a type of cargo that Vortexa currently tracks) or 'any_activity' for any other vessels filter_vessel_location : A location ID, or list of location IDs to filter on. filter_port : Filter by port ID. filter_region : Filter by region ID - takes precedence over filter_port if provided. This should be used in conjunction with use_reference_port filter_days_to_arrival : Filter availability by time to arrival in days` use_reference_port : If this flag is enabled, we will return data for the reference port instead of the user selected one, filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_idle_min : A number greater than 0 (representing idle days). filter_vessel_idle_max : A number greater than 0 and filter_vessel_idle_min (representing idle days). filter_vessel_dwt_min : A number between 0 and 550000. filter_vessel_dwt_max : A number between 0 and 550000. filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_recent_visits : Filter availability by each vessel's recent visits exclude_products : A product ID, or list of product IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_vessel_status : The vessel status on which to base the filter. Enter 'vessel_status_ballast' for ballast vessels, 'vessel_status_laden_known' for laden vessels with known cargo (i.e. a type of cargo that Vortexa currently tracks) or 'any_activity' for any other vessels exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to filter on. exclude_destination : A location ID, or list of location IDs to filter on. Returns TimeSeriesResult Example Time series for the number of vessels available between 0 to 5 days, at port Rotterdam, over 4 days. >>> from vortexasdk import VesselAvailabilityTimeseries, Geographies >>> from datetime import datetime >>> rotterdam = \"68faf65af1345067f11dc6723b8da32f00e304a6f33c000118fccd81947deb4e\" >>> start = datetime(2021, 6, 17) >>> end = datetime(2021, 6, 21) >>> df = (VesselAvailabilityTimeseries().search( ... filter_time_min=start, ... filter_time_max=end, ... filter_port=rotterdam, ... filter_days_to_arrival={\"min\": 0, \"max\": 5}, ... 
).to_df()) Gives the following: key value count 0 2021-06-23 00:00:00+00:00 19225923 224 1 2021-06-24 00:00:00+00:00 19634766 233 2 2021-06-25 00:00:00+00:00 19154857 228 3 2021-06-26 00:00:00+00:00 18410395 225","title":"Time Series"},{"location":"endpoints/vessel_class_breakdown/","text":"vortexasdk.endpoints.vessel_class_breakdown Try me out in your browser: VesselClassBreakdown VesselClassBreakdown(self) search VesselClassBreakdown.search(self, breakdown_unit_average_basis: str = None, filter_activity: str = 'any_activity', breakdown_unit: str = 'b', disable_geographic_exclusion_rules: bool = None, breakdown_size: int = None, filter_time_min: datetime.datetime = datetime.datetime(2019, 10, 1, 0, 0), filter_time_max: datetime.datetime = datetime.datetime(2019, 10, 1, 1, 0), filter_products: Union[str, List[str]] = None, filter_charterers: Union[str, List[str]] = None, filter_vessels: Union[str, List[str]] = None, filter_vessel_classes: Union[str, List[str]] = None, filter_owners: Union[str, List[str]] = None, filter_effective_controllers: Union[str, List[str]] = None, filter_vessel_flags: Union[str, List[str]] = None, filter_vessel_ice_class: Union[str, List[str]] = None, filter_vessel_propulsion: Union[str, List[str]] = None, filter_origins: Union[str, List[str]] = None, filter_destinations: Union[str, List[str]] = None, filter_storage_locations: Union[str, List[str]] = None, filter_waypoints: Union[str, List[str]] = None, filter_ship_to_ship_locations: Union[str, List[str]] = None, filter_vessel_age_min: int = None, filter_vessel_age_max: int = None, filter_vessel_scrubbers: str = 'disabled', filter_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None, exclude_products: Union[str, List[str]] = None, exclude_vessels: Union[str, List[str]] = None, exclude_vessel_classes: Union[str, List[str]] = None, exclude_owners: Union[str, List[str]] = None, exclude_effective_controllers: Union[str, List[str]] = None, exclude_charterers: Union[str, List[str]] = None, exclude_vessel_flags: Union[str, List[str]] = None, exclude_destinations: Union[str, List[str]] = None, exclude_origins: Union[str, List[str]] = None, exclude_waypoints: Union[str, List[str]] = None, exclude_storage_locations: Union[str, List[str]] = None, exclude_ship_to_ship_locations: Union[str, List[str]] = None, exclude_vessel_ice_class: Union[str, List[str]] = None, exclude_vessel_propulsion: Union[str, List[str]] = None, exclude_vessel_tags: Union[List[vortexasdk.api.shared_types.Tag], vortexasdk.api.shared_types.Tag] = None) -> vortexasdk.endpoints.reference_breakdown_result.ReferenceBreakdownResult Vessel class locations breakdown aggregation Arguments breakdown_unit_average_basis : Per day metrics only - movement activity on which to base the average metric. Can be one of state properties of a cargo movement: identified_for_loading_state , loading_state , transiting_state , storing_state , ship_to_ship , unloading_state , unloaded_state , oil_on_water_state , unknown_state , or one of time properties of a cargo movement: identified_for_loading_at , loading_start , loading_end , storing_start , storing_end , ship_to_ship_start , ship_to_ship_end , unloading_start , unloading_end . breakdown_unit : Units to aggregate upon. Must be one of the following: 'b' , 't' , 'cbm' , 'bpd' , 'tpd' , 'mpd' . breakdown_size : Number of top geographies to return. Default is 5. 
disable_geographic_exclusion_rules : A boolean which specifies whether certain movements should be excluded, based on a combination of their origin and destination. filter_activity : Cargo movement activity on which to base the time filter. The endpoint only includes cargo movements matching that match this filter in the aggregations. Must be one of ['loading_state', 'loading_start', 'loading_end', 'identified_for_loading_state', 'unloading_state', 'unloading_start', 'unloading_end', 'storing_state', 'storing_start', 'storing_end', 'transiting_state']. filter_time_min : The UTC start date of the time filter. filter_time_max : The UTC end date of the time filter. filter_effective_controllers : An effective controller ID, or list of effective controller IDs to filter on. filter_vessel_flags : A vessel flag ID, or list of vessel flag IDs to filter on. filter_vessel_ice_class : An ice class ID, or list of ice class IDs to filter on. filter_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to filter on. filter_charterers : An commercial entity ID, or list of commercial entity IDs to filter on. filter_origins : A geography ID, or list of geography IDs to filter on. filter_destinations : A geography ID, or list of geography IDs to filter on. filter_storage_locations : A geography ID, or list of geography IDs to filter on. filter_waypoints : A geography ID, or list of geography IDs to filter on. filter_ship_to_ship_locations : A geography ID, or list of geography IDs to filter on. filter_products : A product ID, or list of product IDs to filter on. filter_vessels : A vessel ID, or list of vessel IDs to filter on. filter_vessel_classes : A vessel class, or list of vessel classes to filter on. filter_vessel_age_min : A number between 1 and 100 (representing years). filter_vessel_age_max : A number between 1 and 100 (representing years). filter_vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. filter_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. exclude_products : A product ID, or list of product IDs to exclude. exclude_vessel_flags : A vessel flag ID, or list of vessel flag IDs to exclude. exclude_vessel_ice_class : An ice class ID, or list of ice class IDs to exclude. exclude_vessel_propulsion : An propulsion means ID, or list of propulsion means IDs to exclude. exclude_vessels : A vessel ID, or list of vessel IDs to exclude. exclude_vessel_classes : A vessel class, or list of vessel classes to exclude. exclude_effective_controllers : An effective controller ID, or list of effective controller IDs to exclude. exclude_vessel_location : A location ID, or list of location IDs to exclude. exclude_destinations : A location ID, or list of location IDs to exclude. exclude_origins : A location ID, or list of location IDs to exclude. exclude_storage_locations : A location ID, or list of location IDs to exclude. exclude_waypoints : A location ID, or list of location IDs to exclude. exclude_ship_to_ship_locations : A location ID, or list of location IDs to exclude. exclude_vessel_tags : A time bound vessel tag, or list of time bound vessel tags to exclude. Returns ReferenceBreakdownResult Example Breakdown by vessel class of cargoes in tonnes. >>> from vortexasdk import VesselClassBreakdown >>> start = datetime(2019, 11, 10) >>> end = datetime(2019, 11, 15) >>> df = VesselClassBreakdown().search( ... filter_activity=\"loading_end\", ... breakdown_unit=\"t\", ... breakdown_size=5, ... filter_time_min=start, ... 
filter_time_max=end ... ).to_df() Gives the following: key value count 0 vlcc_plus 94855526 24 1 aframax 776599 17 2 handymax 381359 24 3 handysize 238723 21 4 panamax 118285 18 5 tiny_tanker 118285 18 6 general_purpose 118285 18 7 sgc 118285 18","title":"Vessel Class Breakdown"},{"location":"endpoints/vessels/","text":"vortexasdk.endpoints.vessels Try me out in your browser: Vessels Vessels(self) Vessels endpoint. load_all Vessels.load_all(self) -> vortexasdk.endpoints.vessels_result.VesselsResult Load all vessels. search Vessels.search(self, term: Union[str, List[str]] = None, ids: Union[str, List[str]] = None, vessel_classes: Union[str, List[str]] = None, vessel_product_types: Union[str, List[str]] = None, vessel_scrubbers: str = 'disabled', exact_term_match: bool = False) -> vortexasdk.endpoints.vessels_result.VesselsResult Find all vessels matching given search arguments. Search arguments are combined in an AND manner. Arguments term : The name(s) (or partial name(s)) of a vessel we'd like to search ids : ID or IDs of vessels we'd like to search vessel_classes : vessel_class (or list of vessel classes) we'd like to search. Each vessel class must be one of \"tiny_tanker\" , \"general_purpose\" , \"handysize\" , \"handymax\" , \"panamax\" , \"aframax\" , \"suezmax\" , \"vlcc_plus\" , \"sgc\" , \"mgc\" , \"lgc\" , \"vlgc\" . Refer to VortexaAPI Vessel Entities for the most up-to-date list of vessel classes. vessel_product_types : A product ID, or list of product IDs to filter on, searching vessels currently carrying these products. vessel_scrubbers : An optional filter to filter on vessels with or without scrubbers. To disable the filter (the default behaviour), enter 'disabled'. To only include vessels with scrubbers, enter 'inc'. To exclude vessels with scrubbers, enter 'exc'. exact_term_match : Search on only exact term matches, or allow similar matches. e.g. When searching for \"Ocean\" with exact_term_match=False , then the SDK will yield vessels named ['Ocean', 'Ocean Wisdom', ...] etc. When exact_term_match=True , the SDK will only yield the vessel named Ocean . Returns List of vessels matching the search arguments. Examples Let's find all the VLCCs with 'ocean' in their name, or related names. >>> from vortexasdk import Vessels >>> vessels_df = Vessels().search(vessel_classes='vlcc', term='ocean').to_df(columns=['name', 'imo', 'mmsi', 'related_names']) name imo mmsi related_names 0 OCEANIS 9532757 241089000 ['OCEANIS'] 1 AEGEAN 9732553 205761000 ['GENER8 OCEANUS'] 2 OCEANIA 9246633 205753000 ['OCEANIA'] 3 ENEOS OCEAN 9662875 432986000 ['ENEOS OCEAN'] 4 OCEAN LILY 9284960 477178100 ['OCEAN LILY'] 5 SHINYO OCEAN 9197868 636019316 ['SHINYO OCEAN'] 6 NASHA 9079107 370497000 ['OCEANIC'] 7 HUMANITY 9180281 422204700 ['OCEAN NYMPH'] Note the term search also looks for vessels with matching related_names . Let's find all the vessels currently carrying Crude. >>> from vortexasdk import Vessels, Products >>> crude = [p.id for p in Products().search(term=\"crude\").to_list() if 'group' in p.layer] >>> vessels_df = Vessels().search(vessel_product_types=crude).to_df() Further Documentation VortexaAPI Vessel Reference reference Vessels.reference(self, id: str) -> Dict Perform a vessel lookup. Arguments id : Vessel ID to lookup Returns Vessel record matching the ID Further Documentation: VortexaAPI Vessel Reference vortexasdk.endpoints.vessels_result VesselsResult VesselsResult(__pydantic_self__, **data: Any) -> None Container class that holds the result obtained from calling the Vessels endpoint.
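The Vessels().reference lookup documented above has no worked example; the following is a hedged sketch in which the term search used to obtain a vessel ID is purely illustrative:
>>> from vortexasdk import Vessels
>>> first_match = Vessels().search(term='ocean').to_list()[0]  # any matching vessel will do
>>> record = Vessels().reference(id=first_match.id)  # full reference record for that vessel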
to_list VesselsResult.to_list(self) -> List[vortexasdk.api.vessel.Vessel] Represent vessels as a list. to_df VesselsResult.to_df(self, columns=None) -> pandas.core.frame.DataFrame Represent vessels as a pd.DataFrame . Arguments columns : The vessel features we want in the dataframe. Enter columns='all' to include all features. Defaults to columns = ['id', 'name', 'imo', 'vessel_class'] . Returns pd.DataFrame of vessels.","title":"Vessels"},{"location":"endpoints/voyages_congestion_breakdown/","text":"vortexasdk.endpoints.voyages_congestion_breakdown Try me out in your browser: VoyagesCongestionBreakdown VoyagesCongestionBreakdown(self) Please note: you will require a subscription to our Freight module to access this endpoint. search VoyagesCongestionBreakdown.search(self, breakdown_property: str = 'port', breakdown_size: int = 2000, order: str = None, order_direction: str = None, time_min: datetime.datetime = datetime.datetime(2022, 1, 1, 0, 0), time_max: datetime.datetime = datetime.datetime(2022, 1, 1, 1, 0), voyage_id: Union[str, List[str]] = None, cargo_movement_id: Union[str, List[str]] = None, voyage_status: Union[str, List[str]] = None, voyage_status_excluded: Union[str, List[str]] = None, movement_status: Union[str, List[str]] = None, movement_status_excluded: Union[str, List[str]] = None, cargo_status: Union[str, List[str]] = None, cargo_status_excluded: Union[str, List[str]] = None, location_status: Union[str, List[str]] = None, location_status_excluded: Union[str, List[str]] = None, commitment_status: Union[str, List[str]] = None, commitment_status_excluded: Union[str, List[str]] = None, exclude_overlapping_entries: bool = None, products: Union[str, List[str]] = None, products_excluded: Union[str, List[str]] = None, latest_products: Union[str, List[str]] = None, latest_products_excluded: Union[str, List[str]] = None, charterers: Union[str, List[str]] = None, charterers_excluded: Union[str, List[str]] = None, effective_controllers: Union[str, List[str]] = None, effective_controllers_excluded: Union[str, List[str]] = None, origins: Union[str, List[str]] = None, origins_excluded: Union[str, List[str]] = None, destinations: Union[str, List[str]] = None, destinations_excluded: Union[str, List[str]] = None, locations: Union[str, List[str]] = None, locations_excluded: Union[str, List[str]] = None, vessels: Union[str, List[str]] = None, vessels_excluded: Union[str, List[str]] = None, flags: Union[str, List[str]] = None, flags_excluded: Union[str, List[str]] = None, ice_class: Union[str, List[str]] = None, ice_class_excluded: Union[str, List[str]] = None, vessel_propulsion: Union[str, List[str]] = None, vessel_propulsion_excluded: Union[str, List[str]] = None, vessel_age_min: int = None, vessel_age_max: int = None, vessel_dwt_min: int = None, vessel_dwt_max: int = None, vessel_cbm_min: int = None, vessel_cbm_max: int = None, vessel_wait_time_min: int = None, vessel_wait_time_max: int = None, vessel_scrubbers: str = None, vessels_tags: Union[vortexasdk.api.shared_types.Tag, List[vortexasdk.api.shared_types.Tag]] = None, vessels_tags_excluded: Union[vortexasdk.api.shared_types.Tag, List[vortexasdk.api.shared_types.Tag]] = None, vessel_risk_level: Union[str, List[str]] = None, vessel_risk_level_excluded: Union[str, List[str]] = None, has_ship_to_ship: bool = None, has_charterer: bool = None) -> vortexasdk.endpoints.voyages_congestion_breakdown_result.CongestionBreakdownResult Returns a count of congested voyages aggregated by congestion location. 
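Before the argument list below, a brief hedged sketch showing the aggregation ordered by vessel count; the dates are illustrative, and the to_df conversion is assumed to behave like the other result containers in this SDK:
>>> from vortexasdk import VoyagesCongestionBreakdown
>>> from datetime import datetime
>>> df = VoyagesCongestionBreakdown().search(
...     time_min=datetime(2022, 4, 26),
...     time_max=datetime(2022, 4, 26, 23, 59),
...     breakdown_property='port',
...     breakdown_size=5,
...     order='count',
...     order_direction='desc'
... ).to_df()  # assumed: the result exposes to_df like the other endpoints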
Arguments breakdown_size : Number of top records to return. breakdown_property : Property to aggregate upon. Can be one of: 'port' , 'shipping_region' , 'terminal' . time_min : The UTC start date of the time filter. time_max : The UTC end date of the time filter. voyage_id : An array of unique voyage ID(s) to filter on. cargo_movement_id : An array of unique cargo movement ID(s) to filter on. voyage_status : A voyage status, or list of voyage statuses to filter on. Can be one of: 'ballast' , 'laden' . voyage_status_excluded : A voyage status, or list of voyage statuses to exclude. movement_status : A movement status, or list of movement statuses to filter on. Can be one of: 'moving' , 'stationary' , 'waiting' , 'congestion' , 'slow' . movement_status_excluded : A movement status, or list of movement statuses to exclude. cargo_status : A cargo status, or list of cargo statuses to filter on. Can be one of: 'in-transit' , 'floating-storage' , 'loading' , 'discharging' . cargo_status_excluded : A cargo status, or list of cargo statuses to exclude. location_status : A location status, or list of location statuses to filter on. Can be one of: 'berth' , 'anchorage-zone' , 'dry-dock' , 'on-the-sea' . location_status_excluded : A location status, or list of location statuses to exclude. commitment_status : A commitment status, or list of commitment statuses to filter on. Can be one of: 'committed' , 'uncommitted' , 'open' , 'unknown' . commitment_status_excluded : A commitment status, or list of commitment statuses to exclude. exclude_overlapping_entries : A boolean to only consider the latest voyage in days where two or more Voyages overlap. products : A product ID, or list of product IDs to filter on. products_excluded : A product ID, or list of product IDs to exclude. latest_products : A product ID, or list of product IDs of the latest cargo on board to filter on. latest_products_excluded : A product ID, or list of product IDs of the latest cargo on board to exclude. charterers : A charterer ID, or list of charterer IDs to filter on. charterers_excluded : A charterer ID, or list of charterer IDs to exclude. effective_controllers : A vessel effective controller ID, or list of vessel effective controller IDs to filter on. effective_controllers_excluded : A effective controller ID, or list of effective controller IDs to exclude. origins : An origin ID, or list of origin IDs to filter on. origins_excluded : An origin ID, or list of origin IDs to exclude. destinations : A destination ID, or list of destination IDs to filter on. destinations_excluded : A destination ID, or list of destination IDs to exclude. locations : A location ID, or list of location IDs to filter on. locations_excluded : A location ID, or list of location IDs to exclude. vessels : A vessel ID or vessel class, or list of vessel IDs/vessel classes to filter on. vessels_excluded : A vessel ID or vessel class, or list of vessel IDs/vessel classes to exclude. flags : A flag, or list of flags to filter on. flags_excluded : A flag, or list of flags to exclude. ice_class : An ice class, or list of ice classes to filter on. ice_class_excluded : An ice class, or list of ice classes to \u0119xclude. vessel_propulsion : A propulsion method, or list of propulsion methods to filter on. vessel_propulsion_excluded : A propulsion method, or list of propulsion methods to \u0119xclude. vessel_age_min : A number between 1 and 100 (representing years). vessel_age_max : A number between 1 and 100 (representing years). 
vessel_dwt_min : A number representing minimum deadweight tonnage of a vessel. vessel_dwt_max : A number representing maximum deadweight tonnage of a vessel. vessel_cbm_min : A number representing minimum cubic capacity of a vessel. vessel_cbm_max : A number representing maximum cubic capacity of a vessel. vessel_wait_time_min : A number representing a minimum number of days until a vessel becomes available. vessel_wait_time_max : A number representing a maximum number of days until a vessel becomes available. vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. vessels_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. vessels_tags_excluded : A time bound vessel tag, or list of time bound vessel tags to exclude. vessel_risk_level : A vessel risk level, or list of vessel risk levels to filter on. vessel_risk_level_excluded : A vessel risk level, or list of vessel risk levels to exclude. has_ship_to_ship : A boolean to show data where at least one STS transfer occurs. has_charterer : A boolean to show data where at least one charterer is specified. order : Used to sort the returned results. Can be one of: 'location' , 'avg_wait' , 'dwt' , 'capacity' , 'count' . order_direction : Determines the direction of sorting. \u2018asc\u2019 for ascending, \u2018desc\u2019 for descending. Returns CongestionBreakdownResult Example Stats for vessels in congestion on 26th April 2022 split by shipping region. >>> from vortexasdk import VoyagesCongestionBreakdown >>> from datetime import datetime >>> search_result = VoyagesCongestionBreakdown().search( ... time_min=datetime(2022, 4, 26), ... time_max=datetime(2022, 4, 26, 23, 59), ... movement_status=\"congestion\", ... breakdown_property=\"shipping_region\", ... breakdown_size=2, ...
).to_df() Gives the following result: | | avg_waiting_time | vessel_dwt | vessel_cubic_capacity | vessel_count | cargo_quantity | avg_waiting_time_laden | vessel_dwt_laden | vessel_cubic_capacity_laden | vessel_count_laden | avg_waiting_time_ballast | vessel_dwt_ballast | vessel_cubic_capacity_ballast | vessel_count_ballast | location_details.0.label | |---:|-------------------:|-------------:|------------------------:|---------------:|-----------------:|-------------------------:|-------------------:|------------------------------:|---------------------:|---------------------------:|---------------------:|--------------------------------:|-----------------------:|:---------------------------| | 0 | 0 | 9199789 | 10271697 | 353 | 320829 | 0 | 7104725 | 7943428 | 161 | 0 | 2095064 | 2328269 | 192 | East Asia | | 1 | 0 | 6415240 | 7241430 | 248 | 106209 | 0 | 3392911 | 3815449 | 126 | 0 | 3022329 | 3425981 | 122 | South East Asia |","title":"Congestion Breakdown"},{"location":"endpoints/voyages_search_enriched/","text":"vortexasdk.endpoints.voyages_search_enriched Try me out in your browser: VoyagesSearchEnriched VoyagesSearchEnriched(self) search VoyagesSearchEnriched.search(self, order: str = None, order_direction: str = None, offset: int = None, unit: str = None, columns: Union[str, List[str]] = None, time_min: datetime.datetime = datetime.datetime(2022, 1, 1, 0, 0), time_max: datetime.datetime = datetime.datetime(2022, 1, 1, 1, 0), voyage_id: Union[str, List[str]] = None, cargo_movement_id: Union[str, List[str]] = None, voyage_status: Union[str, List[str]] = None, voyage_status_excluded: Union[str, List[str]] = None, movement_status: Union[str, List[str]] = None, movement_status_excluded: Union[str, List[str]] = None, cargo_status: Union[str, List[str]] = None, cargo_status_excluded: Union[str, List[str]] = None, location_status: Union[str, List[str]] = None, location_status_excluded: Union[str, List[str]] = None, commitment_status: Union[str, List[str]] = None, commitment_status_excluded: Union[str, List[str]] = None, exclude_overlapping_entries: bool = None, products: Union[str, List[str]] = None, products_excluded: Union[str, List[str]] = None, latest_products: Union[str, List[str]] = None, latest_products_excluded: Union[str, List[str]] = None, charterers: Union[str, List[str]] = None, charterers_excluded: Union[str, List[str]] = None, effective_controllers: Union[str, List[str]] = None, effective_controllers_excluded: Union[str, List[str]] = None, origins: Union[str, List[str]] = None, origins_excluded: Union[str, List[str]] = None, destinations: Union[str, List[str]] = None, destinations_excluded: Union[str, List[str]] = None, locations: Union[str, List[str]] = None, locations_excluded: Union[str, List[str]] = None, vessels: Union[str, List[str]] = None, vessels_excluded: Union[str, List[str]] = None, flags: Union[str, List[str]] = None, flags_excluded: Union[str, List[str]] = None, ice_class: Union[str, List[str]] = None, ice_class_excluded: Union[str, List[str]] = None, vessel_propulsion: Union[str, List[str]] = None, vessel_propulsion_excluded: Union[str, List[str]] = None, vessel_age_min: int = None, vessel_age_max: int = None, vessel_dwt_min: int = None, vessel_dwt_max: int = None, vessel_cbm_min: int = None, vessel_cbm_max: int = None, vessel_wait_time_min: int = None, vessel_wait_time_max: int = None, vessel_scrubbers: str = None, vessels_tags: Union[vortexasdk.api.shared_types.Tag, List[vortexasdk.api.shared_types.Tag]] = None, vessels_tags_excluded: 
Union[vortexasdk.api.shared_types.Tag, List[vortexasdk.api.shared_types.Tag]] = None, vessel_risk_level: Union[str, List[str]] = None, vessel_risk_level_excluded: Union[str, List[str]] = None, has_ship_to_ship: str = None, has_charterer: str = None) -> Union[vortexasdk.endpoints.voyages_search_enriched_result.VoyagesSearchEnrichedFlattenedResult, vortexasdk.endpoints.voyages_search_enriched_result.VoyagesSearchEnrichedListResult] Returns one record per voyage, containing a selection of information about the voyage. NOTE: To display results as a list ( to_list() ), please set the columns parameter to None . To display results as dataframe ( to_df() ), please set the columns parameter to all or a list of selected columns. Arguments time_min : The UTC start date of the time filter. time_max : The UTC end date of the time filter. voyage_id : An array of unique voyage ID(s) to filter on. cargo_movement_id : An array of unique cargo movement ID(s) to filter on. voyage_status : A voyage status, or list of voyage statuses to filter on. Can be one of: 'ballast' , 'laden' . voyage_status_excluded : A voyage status, or list of voyage statuses to exclude. movement_status : A movement status, or list of movement statuses to filter on. Can be one of: 'moving' , 'stationary' , 'waiting' , 'congestion' , 'slow' . movement_status_excluded : A movement status, or list of movement statuses to exclude. cargo_status : A cargo status, or list of cargo statuses to filter on. Can be one of: 'in-transit' , 'floating-storage' , 'loading' , 'discharging' . cargo_status_excluded : A cargo status, or list of cargo statuses to exclude. location_status : A location status, or list of location statuses to filter on. Can be one of: 'berth' , 'anchorage-zone' , 'dry-dock' , 'on-the-sea' . location_status_excluded : A location status, or list of location statuses to exclude. commitment_status : A commitment status, or list of commitment statuses to filter on. Can be one of: 'committed' , 'uncommitted' , 'open' , 'unknown' . commitment_status_excluded : A commitment status, or list of commitment statuses to exclude. exclude_overlapping_entries : A boolean to only consider the latest voyage in days where two or more Voyages overlap. products : A product ID, or list of product IDs to filter on. products_excluded : A product ID, or list of product IDs to exclude. latest_products : A product ID, or list of product IDs of the latest cargo on board to filter on. latest_products_excluded : A product ID, or list of product IDs of the latest cargo on board to exclude. charterers : A charterer ID, or list of charterer IDs to filter on. charterers_excluded : A charterer ID, or list of charterer IDs to exclude. effective_controllers : A vessel effective controller ID, or list of vessel effective controller IDs to filter on. effective_controllers_excluded : A effective controller ID, or list of effective controller IDs to exclude. origins : An origin ID, or list of origin IDs to filter on. origins_excluded : An origin ID, or list of origin IDs to exclude. destinations : A destination ID, or list of destination IDs to filter on. destinations_excluded : A destination ID, or list of destination IDs to exclude. locations : A location ID, or list of location IDs to filter on. locations_excluded : A location ID, or list of location IDs to exclude. vessels : A vessel ID or vessel class, or list of vessel IDs/vessel classes to filter on. vessels_excluded : A vessel ID or vessel class, or list of vessel IDs/vessel classes to exclude. 
flags : A flag, or list of flags to filter on. flags_excluded : A flag, or list of flags to exclude. ice_class : An ice class, or list of ice classes to filter on. ice_class_excluded : An ice class, or list of ice classes to exclude. vessel_propulsion : A propulsion method, or list of propulsion methods to filter on. vessel_propulsion_excluded : A propulsion method, or list of propulsion methods to exclude. vessel_age_min : A number between 1 and 100 (representing years). vessel_age_max : A number between 1 and 100 (representing years). vessel_dwt_min : A number representing minimum deadweight tonnage of a vessel. vessel_dwt_max : A number representing maximum deadweight tonnage of a vessel. vessel_cbm_min : A number representing minimum cubic capacity of a vessel. vessel_cbm_max : A number representing maximum cubic capacity of a vessel. vessel_wait_time_min : A number representing a minimum number of days until a vessel becomes available. vessel_wait_time_max : A number representing a maximum number of days until a vessel becomes available. vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. vessels_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. vessels_tags_excluded : A time bound vessel tag, or list of time bound vessel tags to exclude. vessel_risk_level : A vessel risk level, or list of vessel risk levels to filter on. vessel_risk_level_excluded : A vessel risk level, or list of vessel risk levels to exclude. has_ship_to_ship : Filter data where at least one STS transfer occurs, or none. - one of disabled , inc , exc . Passing disabled means the filter is not active. has_charterer : Filter data where at least one charterer is specified, or none. - one of disabled , inc , exc . Passing disabled means the filter is not active. order : Used to sort the returned results. Can be one of: 'vessel_name' , 'dwt' , 'vessel_class' , 'start_date' , 'end_date' . order_direction : Determines the direction of sorting. \u2018asc\u2019 for ascending, \u2018desc\u2019 for descending. offset : Used to page results. The offset from which records should be returned. unit : Unit of measurement. Enter 'b' for barrels or 't' for tonnes or 'cbm' for cubic metres. columns : Determines what columns are visible in the output. Enter \"all\" for all columns, or any of: 'vessel_name' , 'imo' , 'dwt' , 'capacity' , 'vessel_class' , 'voyage_status' , 'cargo_status' , 'origin' , 'origin_shipping_region' , 'origin_region' , 'origin_country' , 'origin_port' , 'origin_terminal' , 'destination' , 'destination_shipping_region' , 'destination_region' , 'destination_country' , 'destination_port' , 'destination_terminal' , 'destination_eta' , 'charterer' , 'effective_controller' , 'voyage_type' , 'quantity' , 'latest_product' , 'latest_product_group' , 'latest_product_category' , 'latest_product_grade' , 'time_charterer' , 'flag' , 'scrubber' , 'build_year' , 'risk_rating' , 'coating' , 'start_date' , 'end_date' , 'tonne_miles' , 'distance' , 'voyage_id' , 'previous_voyage_id' , 'next_voyage_id' , 'duration' , 'first_origin' , 'first_origin_shipping_region' , 'first_origin_country' , 'first_origin_port' , 'first_origin_terminal' , 'final_destination' , 'final_destination_shipping_region' , 'final_destination_country' , 'final_destination_port' , 'final_destination_terminal' . Returns VoyagesSearchEnrichedListResult or VoyagesSearchEnrichedFlattenedResult Example Voyages as of 13th Feb 2023 for vessels carrying crude departing from Rotterdam.
>>> from vortexasdk import VoyagesSearchEnriched, Geographies >>> from datetime import datetime >>> rotterdam = [g.id for g in Geographies().search(\"rotterdam\").to_list() if \"port\" in g.layer] >>> start = datetime(2023, 2, 13) >>> end = datetime(2023, 2, 13, 23, 59) >>> search_result = VoyagesSearchEnriched().search( ... time_min=start, ... time_max=end, ... origins=rotterdam, ... columns=\"all\", ... ).to_df().head() Gives the following result: | | VESSEL NAME | IMO | DWT (t) | CAPACITY (cbm) | VESSEL CLASS | VOYAGE STATUS | ORIGIN | ORIGIN TERMINAL | ORIGIN PORT | ORIGIN COUNTRY | ORIGIN SHIPPING REGION | DESTINATION | DESTINATION TERMINAL | DESTINATION PORT | DESTINATION COUNTRY | DESTINATION SHIPPING REGION | START DATE | END DATE | LATEST PRODUCT | LATEST PRODUCT GROUP | LATEST PRODUCT CATEGORY | LATEST PRODUCT GRADE | QUANTITY (bbl) | CHARTERER | EFFECTIVE CONTROLLER | TIME CHARTERER | BUILD YEAR | FLAG | RISK RATING | SCRUBBERS | COATING | TONNE-MILES | DURATION (h) | DISTANCE | VOYAGE ID | PREVIOUS VOYAGE ID | NEXT VOYAGE ID | ORIGIN GEOGRAPHIC REGION | DESTINATION GEOGRAPHIC REGION | FIRST ORIGIN | FIRST ORIGIN TERMINAL | FIRST ORIGIN PORT | FIRST ORIGIN COUNTRY | FIRST ORIGIN SHIPPING REGION | FINAL DESTINATION | FINAL DESTINATION TERMINAL | FINAL DESTINATION PORT | FINAL DESTINATION COUNTRY | FINAL DESTINATION SHIPPING REGION | |---:|:---------------------|--------:|----------:|-----------------:|:----------------|:----------------|:--------------------------------|:------------------------|:--------------------------------|:--------------------|:-------------------------|:---------------|:-----------------------|:-------------------|:----------------------|:------------------------------|:-------------|:------------|:--------------------------------|:-------------------------|:------------------------------|:-----------------------|-----------------:|:------------|:-----------------------|:-----------------|-------------:|:-------|:--------------|:------------|:----------|:--------------|---------------:|:-----------|:-----------------------------------------------------------------|:-----------------------------------------------------------------|:-----------------|:---------------------------|:--------------------------------|:------------------|:------------------------|:--------------------|:-----------------------|:-------------------------------|:--------------------|:-----------------------------|:-------------------------|:----------------------------|:------------------------------------| | 0 | YM NEPTUNE | 9464106 | 6970 | 8091 | Tiny tanker | Laden | Frontignan [FR], Rotterdam [NL] | , Vopak Terminal Botlek | Frontignan [FR], Rotterdam [NL] | France, Netherlands | West Med, UK Cont | | | | | | 28 Dec 2022 | | Biodiesel, Other Clean Products | Clean Petroleum Products | Finished Biodiesel, Chemicals | | 19058 | | | | 2009 | MT | Low | | Coated | 15708783 | 1139 | 7570 | 0edb92ac9addd29b42ede8a8b4843ceb0edb92ac9addd29b42ede8a8b4843ceb | f2b314f245a391ee4e1ebcc41d9a2d2741526f0f3625183440e7e280092cfe91 | | Europe, Europe | | Frontignan [FR] | | Frontignan [FR] | France | West Med | | | | | | | 1 | YASA POLARIS | 9907457 | 157300 | 167231 | Suezmax | Ballast | Rotterdam [NL] | TEAM Terminal B.V. 
| Rotterdam [NL] | Netherlands | UK Cont | Rotterdam [NL] | | Rotterdam [NL] | Netherlands | UK Cont | 13 Feb 2023 | | Crude | Crude/Condensates | Light-Sour | CPC Blend | 0 | | Ya-Sa Shipping | | 2021 | MH | Medium | | | | 14 | | ac6c49388567f546d4f57a3e8e8c984e2188f68407394bbe3fde99a0aaff72d7 | f9cab95f35202ab0b273d6d646202080f9cab95f35202ab0b273d6d646202080 | | Europe | Europe | Rotterdam [NL] | TEAM Terminal B.V. | Rotterdam [NL] | Netherlands | UK Cont | Rotterdam [NL] | | Rotterdam [NL] | Netherlands | UK Cont | | 2 | XING HAI WAN | 9570113 | 6123 | 6022 | Tiny tanker | Laden | Rotterdam [NL] | Shell - Rotterdam | Rotterdam [NL] | Netherlands | UK Cont | | | | | | 07 Feb 2023 | | Asphalt/Bitumen | Dirty Petroleum Products | | | 18513 | | | | 2009 | PA | Low | | | 3848284 | 144 | 1257 | 2bb322f03f203bf2570654e6dc22c52e2bb322f03f203bf2570654e6dc22c52e | 2cec64d13c15f4e8999068c63a898335a75bc99b600f97768655ae748b75a2d7 | | Europe | | Rotterdam [NL] | Shell - Rotterdam | Rotterdam [NL] | Netherlands | UK Cont | | | | | | | 3 | XANTHIA | 9246152 | 17031 | 17829 | General purpose | Laden | Rotterdam [NL] | Vopak Terminal Botlek | Rotterdam [NL] | Netherlands | UK Cont | Amsterdam [NL] | | Amsterdam [NL] | Netherlands | UK Cont | 12 Feb 2023 | 15 Feb 2023 | Diesel/Gasoil | Clean Petroleum Products | Gasoil | | 43370 | | Neda Maritime | | 2003 | NO | Low | | Coated | 8334 | 85 | 1 | 640a7b6ae43683ef52bdc5141b5b11a7640a7b6ae43683ef52bdc5141b5b11a7 | 3a7353f9128d669f31e9d775ef53d9355d34928f1a77801da59576d523cb95c5 | | Europe | Europe | Rotterdam [NL] | Vopak Terminal Botlek | Rotterdam [NL] | Netherlands | UK Cont | Amsterdam [NL] | | Amsterdam [NL] | Netherlands | UK Cont | | 4 | WOODSIDE REES WITHER | 9810367 | 96000 | 173400 | Conventional | Ballast | Gate LNG Terminal | Gate LNG Terminal | Rotterdam [NL] | Netherlands | UK Cont | | | | | | 01 Feb 2023 | | LNG | Liquefied Natural Gas | Lean | Corpus Christi LNG | 0 | | MARAN GAS MARITIME | | 2019 | GR | Low | | | | 280 | 3967 | 0fa825ab44e6dc5d352db9e8ef47f41e003a794b97b69677ba5f64b2398456e3 | d51d7fc4c74ed04ec69646d297c2f19cd51d7fc4c74ed04ec69646d297c2f19c | | Europe | | Gate LNG Terminal | Gate LNG Terminal | Rotterdam [NL] | Netherlands | UK Cont | | | | | |","title":"Search Enriched"},{"location":"endpoints/voyages_timeseries/","text":"vortexasdk.endpoints.voyages_timeseries Try me out in your browser: VoyagesTimeseries VoyagesTimeseries(self) Please note: you will require a subscription to our Freight module to access this endpoint. 
search VoyagesTimeseries.search(self, breakdown_frequency: str = None, breakdown_property: str = None, breakdown_split_property: str = None, breakdown_unit_operator: str = None, time_min: datetime.datetime = datetime.datetime(2022, 1, 1, 0, 0), time_max: datetime.datetime = datetime.datetime(2022, 1, 1, 1, 0), voyage_id: Union[str, List[str]] = None, cargo_movement_id: Union[str, List[str]] = None, voyage_status: Union[str, List[str]] = None, voyage_status_excluded: Union[str, List[str]] = None, movement_status: Union[str, List[str]] = None, movement_status_excluded: Union[str, List[str]] = None, cargo_status: Union[str, List[str]] = None, cargo_status_excluded: Union[str, List[str]] = None, location_status: Union[str, List[str]] = None, location_status_excluded: Union[str, List[str]] = None, commitment_status: Union[str, List[str]] = None, commitment_status_excluded: Union[str, List[str]] = None, exclude_overlapping_entries: bool = None, products: Union[str, List[str]] = None, products_excluded: Union[str, List[str]] = None, latest_products: Union[str, List[str]] = None, latest_products_excluded: Union[str, List[str]] = None, charterers: Union[str, List[str]] = None, charterers_excluded: Union[str, List[str]] = None, effective_controllers: Union[str, List[str]] = None, effective_controllers_excluded: Union[str, List[str]] = None, origins: Union[str, List[str]] = None, origins_excluded: Union[str, List[str]] = None, destinations: Union[str, List[str]] = None, destinations_excluded: Union[str, List[str]] = None, locations: Union[str, List[str]] = None, locations_excluded: Union[str, List[str]] = None, congestion_target_location: Union[str, List[str]] = None, congestion_target_location_excluded: Union[str, List[str]] = None, vessels: Union[str, List[str]] = None, vessels_excluded: Union[str, List[str]] = None, flags: Union[str, List[str]] = None, flags_excluded: Union[str, List[str]] = None, ice_class: Union[str, List[str]] = None, ice_class_excluded: Union[str, List[str]] = None, vessel_propulsion: Union[str, List[str]] = None, vessel_propulsion_excluded: Union[str, List[str]] = None, vessel_age_min: int = None, vessel_age_max: int = None, vessel_dwt_min: int = None, vessel_dwt_max: int = None, vessel_cbm_min: int = None, vessel_cbm_max: int = None, vessel_wait_time_min: int = None, vessel_wait_time_max: int = None, vessel_scrubbers: str = None, vessels_tags: Union[vortexasdk.api.shared_types.Tag, List[vortexasdk.api.shared_types.Tag]] = None, vessels_tags_excluded: Union[vortexasdk.api.shared_types.Tag, List[vortexasdk.api.shared_types.Tag]] = None, vessel_risk_level: Union[str, List[str]] = None, vessel_risk_level_excluded: Union[str, List[str]] = None, has_ship_to_ship: bool = None, has_charterer: bool = None) -> vortexasdk.endpoints.breakdown_result.BreakdownResult Returns a count of voyages per record for the requested date period Arguments breakdown_frequency : Frequency denoting the granularity of the time series. Must be one of the following: 'day' , 'week' , 'doe_week' , 'month' , 'quarter' , 'year' . breakdown_property : Property to aggregate upon. Can be one of: 'vessel_count' , 'utilisation' , 'cargo_quantity' , 'avg_wait_time' , 'dwt' , 'cubic_capacity' , 'tonne_miles' , 'avg_distance' , 'avg_speed' . breakdown_split_property : Property to split results by. 
Can be one of: 'vessel_status' , 'vessel_class' , 'vessel_flag' , 'fixture_status' , 'origin_region' , 'origin_shipping_region' , 'origin_trading_region' , 'origin_trading_sub_region' , 'origin_trading_block' , 'origin_country' , 'origin_port' , 'origin_terminal' , 'destination_region' , 'destination_shipping_region' , 'destination_trading_region' , 'destination_trading_sub_region' , 'destination_trading_block' , 'destination_country' , 'destination_port' , 'destination_terminal' , 'location_port' , 'location_country' , 'location_shipping_region' , 'congestion_location_port' , 'congestion_location_country' , 'congestion_location_shipping_region' , 'product_group' , 'product_group_product' , 'product_category' , 'product_grade' , 'charterer' , 'effective_controller' , 'none' or not provided. breakdown_unit_operator : Denotes the type of the aggregation calculation. Can be one of 'sum' or 'avg' . time_min : The UTC start date of the time filter. time_max : The UTC end date of the time filter. voyage_id : An array of unique voyage ID(s) to filter on. cargo_movement_id : An array of unique cargo movement ID(s) to filter on. voyage_status : A voyage status, or list of voyage statuses to filter on. Can be one of: 'ballast' , 'laden' . voyage_status_excluded : A voyage status, or list of voyage statuses to exclude. movement_status : A movement status, or list of movement statuses to filter on. Can be one of: 'moving' , 'stationary' , 'waiting' , 'congestion' , 'slow' . movement_status_excluded : A movement status, or list of movement statuses to exclude. cargo_status : A cargo status, or list of cargo statuses to filter on. Can be one of: 'in-transit' , 'floating-storage' , 'loading' , 'discharging' . cargo_status_excluded : A cargo status, or list of cargo statuses to exclude. location_status : A location status, or list of location statuses to filter on. Can be one of: 'berth' , 'anchorage-zone' , 'dry-dock' , 'on-the-sea' . location_status_excluded : A location status, or list of location statuses to exclude. commitment_status : A commitment status, or list of commitment statuses to filter on. Can be one of: 'committed' , 'uncommitted' , 'open' , 'unknown' . commitment_status_excluded : A commitment status, or list of commitment statuses to exclude. exclude_overlapping_entries : A boolean to only consider the latest voyage in days where two or more Voyages overlap. products : A product ID, or list of product IDs to filter on. products_excluded : A product ID, or list of product IDs to exclude. latest_products : A product ID, or list of product IDs of the latest cargo on board to filter on. latest_products_excluded : A product ID, or list of product IDs of the latest cargo on board to exclude. charterers : A charterer ID, or list of charterer IDs to filter on. charterers_excluded : A charterer ID, or list of charterer IDs to exclude. effective_controllers : A vessel effective controller ID, or list of vessel effective controller IDs to filter on. effective_controllers_excluded : A effective controller ID, or list of effective controller IDs to exclude. origins : An origin ID, or list of origin IDs to filter on. origins_excluded : An origin ID, or list of origin IDs to exclude. destinations : A destination ID, or list of destination IDs to filter on. destinations_excluded : A destination ID, or list of destination IDs to exclude. locations : A location ID, or list of location IDs to filter on. locations_excluded : A location ID, or list of location IDs to exclude. 
congestion_target_location : A congestion location ID, or list of congestion location IDs to filter on. congestion_target_location_excluded : A congestion location ID, or list of congestion location IDs to exclude. vessels : A vessel ID or vessel class, or list of vessel IDs/vessel classes to filter on. vessels_excluded : A vessel ID or vessel class, or list of vessel IDs/vessel classes to exclude. flags : A flag, or list of flags to filter on. flags_excluded : A flag, or list of flags to exclude. ice_class : An ice class, or list of ice classes to filter on. ice_class_excluded : An ice class, or list of ice classes to \u0119xclude. vessel_propulsion : A propulsion method, or list of propulsion methods to filter on. vessel_propulsion_excluded : A propulsion method, or list of propulsion methods to \u0119xclude. vessel_age_min : A number between 1 and 100 (representing years). vessel_age_max : A number between 1 and 100 (representing years). vessel_dwt_min : A number representing minimum deadweight tonnage of a vessel. vessel_dwt_max : A number representing maximum deadweight tonnage of a vessel. vessel_cbm_min : A number representing minimum cubic capacity of a vessel. vessel_cbm_max : A number representing maximum cubic capacity of a vessel. vessel_wait_time_min : A number representing a minimum number of days until a vessel becomes available. vessel_wait_time_max : A number representing a maximum number of days until a vessel becomes available. vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. vessels_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. vessels_tags_excluded : A time bound vessel tag, or list of time bound vessel tags to exclude. vessel_risk_level : A vessel risk level, or list of vessel risk levels to filter on. vessel_risk_level_excluded : A vessel risk level, or list of vessel risk levels to exclude. has_ship_to_ship : A boolean to show data where at least one STS transfer occurs. has_charterer : A boolean to show data where at least one charterer is specified. Returns BreakdownResult Example Sum of vessels departing from Rotterdam between 26th-28th April 2022, split by location country. >>> from vortexasdk import VoyagesTimeseries, Geographies >>> from datetime import datetime >>> rotterdam = [g.id for g in Geographies().search(\"rotterdam\").to_list() if \"port\" in g.layer] >>> search_result = VoyagesTimeseries().search( ... origins=rotterdam, ... time_min=datetime(2022, 4, 26), ... time_max=datetime(2022, 4, 28, 23, 59), ... breakdown_frequency=\"day\", ... breakdown_property=\"vessel_count\", ... breakdown_split_property=\"location_country\", ... ).to_df() Gives the following result: | | key | value | count | breakdown.0.label | breakdown.0.count | breakdown.0.value | |---:|:--------------------------|--------:|--------:|:--------------------|--------------------:|--------------------:| | 0 | 2022-04-26 00:00:00+00:00 | 294 | 294 | Netherlands | 85 | 85 | | 1 | 2022-04-27 00:00:00+00:00 | 281 | 281 | Netherlands | 82 | 82 | | 2 | 2022-04-28 00:00:00+00:00 | 279 | 279 | Netherlands | 85 | 85 |","title":"Time Series"},{"location":"endpoints/voyages_top_hits/","text":"vortexasdk.endpoints.voyages_top_hits Try me out in your browser: VoyagesTopHits VoyagesTopHits(self) Please note: you will require a subscription to our Freight module to access this endpoint. 
search VoyagesTopHits.search(self, breakdown_property: str = None, breakdown_split_property: str = None, breakdown_size: int = None, time_min: datetime.datetime = datetime.datetime(2022, 1, 1, 0, 0), time_max: datetime.datetime = datetime.datetime(2022, 1, 1, 1, 0), voyage_id: Union[str, List[str]] = None, cargo_movement_id: Union[str, List[str]] = None, voyage_status: Union[str, List[str]] = None, voyage_status_excluded: Union[str, List[str]] = None, movement_status: Union[str, List[str]] = None, movement_status_excluded: Union[str, List[str]] = None, cargo_status: Union[str, List[str]] = None, cargo_status_excluded: Union[str, List[str]] = None, location_status: Union[str, List[str]] = None, location_status_excluded: Union[str, List[str]] = None, commitment_status: Union[str, List[str]] = None, commitment_status_excluded: Union[str, List[str]] = None, exclude_overlapping_entries: bool = None, products: Union[str, List[str]] = None, products_excluded: Union[str, List[str]] = None, latest_products: Union[str, List[str]] = None, latest_products_excluded: Union[str, List[str]] = None, charterers: Union[str, List[str]] = None, charterers_excluded: Union[str, List[str]] = None, effective_controllers: Union[str, List[str]] = None, effective_controllers_excluded: Union[str, List[str]] = None, origins: Union[str, List[str]] = None, origins_excluded: Union[str, List[str]] = None, destinations: Union[str, List[str]] = None, destinations_excluded: Union[str, List[str]] = None, locations: Union[str, List[str]] = None, locations_excluded: Union[str, List[str]] = None, congestion_target_location: Union[str, List[str]] = None, congestion_target_location_excluded: Union[str, List[str]] = None, vessels: Union[str, List[str]] = None, vessels_excluded: Union[str, List[str]] = None, flags: Union[str, List[str]] = None, flags_excluded: Union[str, List[str]] = None, ice_class: Union[str, List[str]] = None, ice_class_excluded: Union[str, List[str]] = None, vessel_propulsion: Union[str, List[str]] = None, vessel_propulsion_excluded: Union[str, List[str]] = None, vessel_age_min: int = None, vessel_age_max: int = None, vessel_dwt_min: int = None, vessel_dwt_max: int = None, vessel_cbm_min: int = None, vessel_cbm_max: int = None, vessel_wait_time_min: int = None, vessel_wait_time_max: int = None, vessel_scrubbers: str = None, vessels_tags: Union[vortexasdk.api.shared_types.Tag, List[vortexasdk.api.shared_types.Tag]] = None, vessels_tags_excluded: Union[vortexasdk.api.shared_types.Tag, List[vortexasdk.api.shared_types.Tag]] = None, vessel_risk_level: Union[str, List[str]] = None, vessel_risk_level_excluded: Union[str, List[str]] = None, has_ship_to_ship: bool = None, has_charterer: bool = None) -> vortexasdk.endpoints.aggregation_breakdown_result.AggregationBreakdownResult Returns a count of voyages aggregated by a chosen breakdown_property . Arguments breakdown_size : Number of top records to return. breakdown_property : Property to aggregate upon. Must be vessel_count or not provided. breakdown_split_property : Property to split results by. 
Can be one of: 'vessel_status' , 'vessel_class' , 'vessel_flag' , 'fixture_status' , 'origin_region' , 'origin_shipping_region' , 'origin_trading_region' , 'origin_trading_sub_region' , 'origin_trading_block' , 'origin_country' , 'origin_port' , 'origin_terminal' , 'destination_region' , 'destination_shipping_region' , 'destination_trading_region' , 'destination_trading_sub_region' , 'destination_trading_block' , 'destination_country' , 'destination_port' , 'destination_terminal' , 'location_port' , 'location_country' , 'location_shipping_region' , 'congestion_location_port' , 'congestion_location_country' , 'congestion_location_shipping_region' , 'product_group' , 'product_group_product' , 'product_category' , 'product_grade' . time_min : The UTC start date of the time filter. time_max : The UTC end date of the time filter. voyage_id : An array of unique voyage ID(s) to filter on. cargo_movement_id : An array of unique cargo movement ID(s) to filter on. voyage_status : A voyage status, or list of voyage statuses to filter on. Can be one of: 'ballast' , 'laden' . voyage_status_excluded : A voyage status, or list of voyage statuses to exclude. movement_status : A movement status, or list of movement statuses to filter on. Can be one of: 'moving' , 'stationary' , 'waiting' , 'congestion' , 'slow' . movement_status_excluded : A movement status, or list of movement statuses to exclude. cargo_status : A cargo status, or list of cargo statuses to filter on. Can be one of: 'in-transit' , 'floating-storage' , 'loading' , 'discharging' . cargo_status_excluded : A cargo status, or list of cargo statuses to exclude. location_status : A location status, or list of location statuses to filter on. Can be one of: 'berth' , 'anchorage-zone' , 'dry-dock' , 'on-the-sea' . location_status_excluded : A location status, or list of location statuses to exclude. commitment_status : A commitment status, or list of commitment statuses to filter on. Can be one of: 'committed' , 'uncommitted' , 'open' , 'unknown' . commitment_status_excluded : A commitment status, or list of commitment statuses to exclude. exclude_overlapping_entries : A boolean to only consider the latest voyage in days where two or more Voyages overlap. products : A product ID, or list of product IDs to filter on. products_excluded : A product ID, or list of product IDs to exclude. latest_products : A product ID, or list of product IDs of the latest cargo on board to filter on. latest_products_excluded : A product ID, or list of product IDs of the latest cargo on board to exclude. charterers : A charterer ID, or list of charterer IDs to filter on. charterers_excluded : A charterer ID, or list of charterer IDs to exclude. effective_controllers : A vessel effective controller ID, or list of vessel effective controller IDs to filter on. effective_controllers_excluded : A vessel effective controller ID, or list of vessel effective controller IDs to exclude. origins : An origin ID, or list of origin IDs to filter on. origins_excluded : An origin ID, or list of origin IDs to exclude. destinations : A destination ID, or list of destination IDs to filter on. destinations_excluded : A destination ID, or list of destination IDs to exclude. locations : A location ID, or list of location IDs to filter on. locations_excluded : A location ID, or list of location IDs to exclude. congestion_target_location : A congestion location ID, or list of congestion location IDs to filter on. 
congestion_target_location_excluded : A congestion location ID, or list of congestion location IDs to exclude. vessels : A vessel ID or vessel class, or list of vessel IDs/vessel classes to filter on. vessels_excluded : A vessel ID or vessel class, or list of vessel IDs/vessel classes to exclude. flags : A flag, or list of flags to filter on. flags_excluded : A flag, or list of flags to exclude. ice_class : An ice class, or list of ice classes to filter on. ice_class_excluded : An ice class, or list of ice classes to exclude. vessel_propulsion : A propulsion method, or list of propulsion methods to filter on. vessel_propulsion_excluded : A propulsion method, or list of propulsion methods to exclude. vessel_age_min : A number between 1 and 100 (representing years). vessel_age_max : A number between 1 and 100 (representing years). vessel_dwt_min : A number representing minimum deadweight tonnage of a vessel. vessel_dwt_max : A number representing maximum deadweight tonnage of a vessel. vessel_cbm_min : A number representing minimum cubic capacity of a vessel. vessel_cbm_max : A number representing maximum cubic capacity of a vessel. vessel_wait_time_min : A number representing a minimum number of days until a vessel becomes available. vessel_wait_time_max : A number representing a maximum number of days until a vessel becomes available. vessel_scrubbers : Either inactive 'disabled', or included 'inc' or excluded 'exc'. vessels_tags : A time bound vessel tag, or list of time bound vessel tags to filter on. vessels_tags_excluded : A time bound vessel tag, or list of time bound vessel tags to exclude. vessel_risk_level : A vessel risk level, or list of vessel risk levels to filter on. vessel_risk_level_excluded : A vessel risk level, or list of vessel risk levels to exclude. has_ship_to_ship : A boolean to show data where at least one STS transfer occurs. has_charterer : A boolean to show data where at least one charterer is specified. Returns AggregationBreakdownResult Example Top origin countries for crude imports to Rotterdam on 1st August 2021. >>> from vortexasdk import VoyagesTopHits, Geographies, Products >>> from datetime import datetime >>> rotterdam = [g.id for g in Geographies().search(\"rotterdam\").to_list() if \"port\" in g.layer] >>> crude = [p.id for p in Products().search(\"crude\").to_list() if \"Crude\" == p.name] >>> start = datetime(2021, 8, 1) >>> end = datetime(2021, 8, 1, 23, 59) >>> search_result = VoyagesTopHits().search( ... time_min=start, ... time_max=end, ... destinations=rotterdam, ... products=crude, ... breakdown_size=5, ... breakdown_split_property=\"origin_country\" ... ).to_list() Gives the following result: [ AggregationBreakdownItem( id='b996521be9c996db', count=8, value=8.0, label='Russia' ), AggregationBreakdownItem( id='2d92cc08f22524db', count=7, value=7.0, label='United States' ), AggregationBreakdownItem( id='2aaad41b89dfad19', count=4, value=4.0, label='United Kingdom' ), AggregationBreakdownItem( id='430f0e467f3a408f', count=2, value=2.0, label='Nigeria' ), AggregationBreakdownItem( id='3eac69e760d9ec57', count=1, value=1.0, label='Egypt' ) ]","title":"Top Hits"},{"location":"entities/breakdown_item/","text":"vortexasdk.api.breakdown_item BreakdownItem BreakdownItem(__pydantic_self__, **data: Any) -> None Generic container class holding a key <> value pair, a count , and optionally a label and a breakdown of records contributing to the given value.
For example, this class could hold the average speed of vessels ( value ) on 2019-01-01 ( key ), the number of vessels contributing to this average ( count ) and additional information about the aggregation ( breakdown ). If the BreakdownItem is enriched by reference data (e.g. in fleet-utilisation/breakdown/origin ), key is the ID of the reference entity, label holds its name and value and count correspond to numeric values of the returned record.","title":"Breakdown Item"},{"location":"entities/cargo_movement/","text":"vortexasdk.api.cargo_movement ParentID ParentID(__pydantic_self__, **data: Any) -> None cargo_movement_id may change under certain conditions. ParentID contains an id , a previous id of the cargo movement, and a splinter_timestamp , the time at which the id change occurred. Cargo Movement Further Documentation CargoMovement CargoMovement(__pydantic_self__, **data: Any) -> None Cargo movements are the base data set the Vortexa API is centred around. Each movement represents a journey of a certain quantity of a product between places. Cargo Movement Further Documentation","title":"Cargo Movement"},{"location":"entities/corporation/","text":"vortexasdk.api.corporation Corporation Corporation(__pydantic_self__, **data: Any) -> None Represent a Corporation reference record returned by the API. CorporateEntity CorporateEntity(__pydantic_self__, **data: Any) -> None Represents a relationship between a corporation and another entity like a vessel. Corporate Entity Further Documentation","title":"Corporation"},{"location":"entities/geography/","text":"vortexasdk.api.geography BoundingBox BoundingBox(__pydantic_self__, **data: Any) -> None Polygon with list of bounding lon lat coords. Geography Geography(__pydantic_self__, **data: Any) -> None Represent a Geography reference record returned by the API. GeographyEntity GeographyEntity(__pydantic_self__, **data: Any) -> None Represents a hierarchy tree of locational data. Geography Entities Further Documentation","title":"Geography"},{"location":"entities/product/","text":"vortexasdk.api.product Product Product(__pydantic_self__, **data: Any) -> None Represent a Product reference record returned by the API. Product Further Documentation ProductEntityWithSingleLayer ProductEntityWithSingleLayer(__pydantic_self__, **data: Any) -> None Represents a single product layer of a hierarchical product tree. Further Documentation ProductEntityWithListLayer ProductEntityWithListLayer(__pydantic_self__, **data: Any) -> None Represents a single product layer of a hierarchical product tree. Further Documentation","title":"Product"},{"location":"entities/timeseries_item/","text":"vortexasdk.api.timeseries_item TimeSeriesItem TimeSeriesItem(__pydantic_self__, **data: Any) -> None Generic container class holding a key <> value pair, and a count of records contributing to the given value. For example, this class could hold the total tonnage exported (value) on 2019-01-01 (key), and the count of cargo movements contributing to this tonnage aggregate, i.e. the number of cargo movements on this day (count).","title":"Time Series Item"},{"location":"entities/vessel/","text":"vortexasdk.api.vessel Vessel Vessel(__pydantic_self__, **data: Any) -> None Represent a Vessel reference record returned by the API. Vessels Further Documentation VesselEntity VesselEntity(__pydantic_self__, **data: Any) -> None A VesselEntity represents a vessel record used in CargoMovements.
Vessel Entities Further Documentation","title":"Vessel"},{"location":"entities/vessel_availability/","text":"vortexasdk.api.vessel_availability DeclaredDestination DeclaredDestination(__pydantic_self__, **data: Any) -> None Current destination location, as reported by the available vessel VesselFixtures VesselFixtures(__pydantic_self__, **data: Any) -> None Current fixture information for the available vessel VesselAvailability VesselAvailability(__pydantic_self__, **data: Any) -> None Vessel Availability shows vessels that are available to load a given cargo at a given port within a specified time range.","title":"Vessel Availability"},{"location":"entities/voyages/","text":"vortexasdk.api.voyages CongestionBreakdownItem CongestionBreakdownItem(__pydantic_self__, **data: Any) -> None Congestion breakdown shows various stats of vessels in congestion. VoyagesVesselEntity VoyagesVesselEntity(__pydantic_self__, **data: Any) -> None A VoyagesVesselEntity represents a vessel record used in Voyages. Vessel Entities Further Documentation VoyageVesselEvent VoyageVesselEvent(__pydantic_self__, **data: Any) -> None A vessel event represents an activity that a vessel has performed during a voyage Voyage Events Further Documentation VoyageCargoEvent VoyageCargoEvent(__pydantic_self__, **data: Any) -> None Cargo events relate to the movement of cargo during the voyage. Voyage Events Further Documentation VoyageStatusEvent VoyageStatusEvent(__pydantic_self__, **data: Any) -> None Status events describe the status of the voyage at a given period. Voyage Events Further Documentation VoyageEnrichedItem VoyageEnrichedItem(__pydantic_self__, **data: Any) -> None A voyage is defined as a continuous period of time when the vessel is either laden or ballast. Each voyage is made up of multiple voyage events which describe the activity of the vessel while it is laden or ballast. Voyages Further Documentation","title":"Voyages"},{"location":"examples/0_sample_load_cargo_movements/","text":"Simple example to retrieving some sample cargo movements in a dataframe. The below script returns something similar to: events.cargo_port_unload_event.0.start_timestamp product.group.label product.grade.label quantity vessels.0.name 0 2019-10-08T00:41:00+0000 Crude Djeno 123457 AROME 1 2019-11-08T00:41:52+0000 Crude Arab Medium 99898 SCOOBYDOO 2 2019-09-30T23:49:41+0000 Crude Arab Heavy 9879878 DAVID 3 2019-12-01T01:40:00+0000 Crude Usan 999999 DUCK from datetime import datetime from vortexasdk import CargoMovements if __name__ == \"__main__\": # Query API to find all vessels that were loading on the 1st of Aug 2019 search_result = CargoMovements().search( filter_activity=\"loading_start\", filter_time_min=datetime(2019, 8, 1), filter_time_max=datetime(2019, 8, 2), ) print(\"Cargo movements successfully loaded\") # Convert search result to dataframe df = search_result.to_df() print(df.head())","title":"0 Simple Load Cargo Movements"},{"location":"examples/1_china/","text":"Let's retrieve all the VLCCs that have discharged into China in the last 3 months. 
The below script returns: events.cargo_port_unload_event.0.start_timestamp product.group.label product.grade.label quantity vessels.0.name 0 2019-10-08T00:41:00+0000 Crude Djeno 123457 AROME 1 2019-11-08T00:41:52+0000 Crude Arab Medium 99898 SCOOBYDOO 2 2019-09-30T23:49:41+0000 Crude Arab Heavy 9879878 DAVID 3 2019-12-01T01:40:00+0000 Crude Usan 999999 DUCK from datetime import datetime from vortexasdk import CargoMovements, Geographies, Vessels if __name__ == \"__main__\": # Find china ID china = Geographies().search(term=\"China\", exact_term_match=True).to_list()[0].id # Find the ID of all VLCCs vlccs = [ v.id for v in Vessels().search(vessel_classes=\"vlcc_plus\").to_list() ] # Query API search_result = CargoMovements().search( filter_activity=\"loading_start\", filter_vessels=vlccs, filter_destinations=china, filter_time_min=datetime(2019, 9, 29), filter_time_max=datetime(2019, 10, 30), ) # Convert search result to dataframe df = search_result.to_df()","title":"1 China VLCC Discharges"},{"location":"examples/2_crude_from_saudi_arabia_to_india/","text":"Let's find all crude cargo movements from Saudi Arabia to India that loaded in the last month. The below script returns a pd.DataFrame , similar to the table given in the movements tab of https://analytics.vortexa.com , filtering on Products: Crude with Origin: Saudi Arabia , Destination: India and Date Range: Departures in the last Month . from datetime import datetime from dateutil.relativedelta import relativedelta from vortexasdk import CargoMovements, Geographies, Products if __name__ == \"__main__\": now = datetime.utcnow() one_month_ago = now - relativedelta(months=1) # For this analysis we need the geography ID for India, and the geography ID for Saudi Arabia. We're going to # show 2 ways to retrieve geography IDs. You'll want to chose method 1 or 2 depending on your use case. # Option 1. We look up a geography with an exact matching name saudi_arabia = ( Geographies() .search(\"Saudi Arabia\", exact_term_match=True) .to_list()[0] .id ) # Option 2. We search for geographies with similar names, then pick the one we're looking for # First we find the ID for the country India. Note that when searching geographies with the term 'india', we'll # retrieve all geographies with india in the name, ie Indiana, British Indian Ocean Territory... all_geogs_with_india_in_the_name = Geographies().search(\"india\").to_list() # If running interactively, you may want to print all the names here to inspect them for yourself for g in all_geogs_with_india_in_the_name: print(g.name) # We're only interested in the country India here india = [ g.id for g in all_geogs_with_india_in_the_name if g.name == \"India\" ] # Check we've only got one ID for India assert len(india) == 1 # Let's find the Crude ID, # here we know the exact name of the product we're looking for so we set exact_term_match=True crude = Products().search(\"Crude\", exact_term_match=True).to_list()[0].id # Query the API. search_result = CargoMovements().search( filter_activity=\"loading_end\", filter_origins=saudi_arabia, filter_destinations=india, filter_products=crude, filter_time_min=one_month_ago, filter_time_max=now, ) # A complete list of available columns can be found at https://vortechsa.github.io/python-sdk/endpoints/cargo_movements/#notes # We only require a subset of available columns here required_columns = [ # A cargo movement can be carried by multiple vessels across various STS transfers. 
You can find all the vessels that # the cargo was onboard by inspecting the 'vessels.0', 'vessels.1' columns etc. # The 'vessels.0' columns shows the primary vessel associated with the cargo movement \"vessels.0.name\", \"vessels.0.vessel_class\", # Here we show any corporate information associated with the primary vessel \"vessels.0.corporate_entities.charterer.label\", \"vessels.0.corporate_entities.time_charterer.label\", \"vessels.0.corporate_entities.effective_controller.label\", # Show the product information and quantity \"product.group.label\", \"product.grade.label\", \"quantity\", # Is the vessel in transit, has it already discharged, or is it in floating storage? \"status\", # Show the loading Port name, and the loading timestamp \"events.cargo_port_load_event.0.location.port.label\", \"events.cargo_port_load_event.0.end_timestamp\", # Show the discharge Port name, and the discharge timestamp \"events.cargo_port_unload_event.0.location.port.label\", \"events.cargo_port_unload_event.0.end_timestamp\", ] # Convert the search result to a dataframe df = search_result.to_df(columns=required_columns) # Sort the dataframe by loading timestamp df = df.sort_values(by=[\"events.cargo_port_load_event.0.end_timestamp\"])","title":"2 crude from saudi arabia to india"},{"location":"examples/3_chinese_daily_imports/","text":"Let's retrieve the daily sum of Chinese Crude/Condensate imports, across January 2019. The below script returns: key value count 0 2019-01-01T00:00:00.000Z 1237381 9 1 2019-01-02T00:00:00.000Z 6548127 23 2 2019-01-03T00:00:00.000Z 45457617 23 3 2019-01-04T00:00:00.000Z 6467759 43 4 2019-01-05T00:00:00.000Z 7777144 4 ... from datetime import datetime from vortexasdk import CargoTimeSeries, Geographies, Products if __name__ == \"__main__\": # Find china ID, here we're only looking for geographies with the exact name China, so we set exact_term_match=True china = Geographies().search(term=\"China\", exact_term_match=True).to_list()[0].id # Find Crude/Condensates ID. # Again, we know the exact name of the product we're searching for, so we set exact_term_match=True crude_condensates = Products().search(term=\"Crude/Condensates\", exact_term_match=True).to_list()[0].id # Query API search_result = CargoTimeSeries().search( # We're only interested in movements into China filter_destinations=china, # We're looking at daily imports timeseries_frequency=\"day\", # We want 'b' for barrels here timeseries_unit=\"b\", # We're only interested in Crude/Condensates filter_products=crude_condensates, # We want all cargo movements that unloaded in January 2019 to be included filter_activity=\"unloading_start\", filter_time_min=datetime(2019, 1, 1), filter_time_max=datetime(2019, 2, 1), ) # Convert search result to dataframe df = search_result.to_df()","title":"3 chinese daily imports"},{"location":"examples/4_medium_sour_floating_storage/","text":"Let's see how much Medium-Sour Crude is in long term floating storage, in January 2019. The below script returns: key value count 0 2019-01-01T00:00:00.000Z 7381 9 1 2019-01-02T00:00:00.000Z 8127 23 2 2019-01-03T00:00:00.000Z 2333 32 3 2019-01-04T00:00:00.000Z 447759 43 4 2019-01-05T00:00:00.000Z 7777144 4 ... 
from datetime import datetime from docs.utils import to_markdown from vortexasdk import CargoTimeSeries, Products if __name__ == \"__main__\": # Find Medium Sour ID medium_sour = [ p.id for p in Products().search(term=\"Medium-Sour\").to_list() if p.name == \"Medium-Sour\" ] # Check we've only got one ID assert len(medium_sour) == 1 # Query API search_result = CargoTimeSeries().search( # We're looking at daily storage levels timeseries_frequency=\"day\", # We want 'b' for barrels here timeseries_unit=\"b\", # We're only interested in storage of Medium-Sour Crude filter_products=medium_sour, # We're only interested in cargoes that were in floating storage filter_activity=\"storing_state\", # We're only interested in floating storage that lasted longer than 14 days timeseries_activity_time_span_min=1000 * 60 * 60 * 24 * 14, # Let's limit the search to January 2019 storage events filter_time_min=datetime(2019, 1, 1), filter_time_max=datetime(2019, 2, 1), ) # Convert search result to dataframe df = search_result.to_df() print(to_markdown(df.head()))","title":"4 medium sour floating storage"},{"location":"examples/jupyter_notebooks/","text":"Example Jupyter Notebooks The examples section of the vortexasdk GitHub repository hosts a variety of Jupyter Notebooks that illustrate some real-life use-cases of how the SDK can be used to answer interesting industry questions, identify emerging patterns & trends and assist forecasting models. Those notebooks include: New to Python - Crude and Condensates in Floating Storage US Crude Exports Exploration & Forecasting China Oil Flows during the Covid-19 Outbreak Crude Floating Storage and its relation to Prices Financial services use cases notebook","title":"Jupyter Notebooks"},{"location":"examples/jupyter_notebooks/#example-jupyter-notebooks","text":"The examples section of the vortexasdk GitHub repository hosts a variety of Jupyter Notebooks that illustrate some real-life use-cases of how the SDK can be used to answer interesting industry questions, identify emerging patterns & trends and assist forecasting models.
Those notebooks include: New to Python - Crude and Condensates in Floating Storage US Crude Exports Exploration & Forecasting China Oil Flows during the Covid-19 Outbreak Crude Floating Storage and its relation to Prices Financial services use cases notebook","title":"Example Jupyter Notebooks"}]} \ No newline at end of file diff --git a/sitemap.xml b/sitemap.xml index ccec8741..7aa12ec8 100644 --- a/sitemap.xml +++ b/sitemap.xml @@ -2,247 +2,247 @@ None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily None - 2023-09-07 + 2023-09-11 daily \ No newline at end of file diff --git a/sitemap.xml.gz b/sitemap.xml.gz index 9c9b6bc7..c2423c81 100644 Binary files a/sitemap.xml.gz and b/sitemap.xml.gz differ