kafka lfw OK
svebk committed Jan 23, 2020
1 parent 79b26ec commit d5f6570
Showing 5 changed files with 46 additions and 40 deletions.
4 changes: 2 additions & 2 deletions cufacesearch/cufacesearch/searcher/generic_searcher.py
@@ -10,7 +10,7 @@ class GenericSearcher(ConfReader):
"""GenericSearcher class
"""

def __init__(self, global_conf_in, prefix=default_prefix):
def __init__(self, global_conf_in, prefix=default_prefix, pp="GenericSearcher"):
"""GenericSearcher constructor
:param global_conf_in: configuration file or dictionary
@@ -53,7 +53,7 @@ def __init__(self, global_conf_in, prefix=default_prefix):

self.indexed_updates = set()
super(GenericSearcher, self).__init__(global_conf_in, prefix)
self.set_pp(pp="GenericSearcher")
self.set_pp(pp=pp)

get_pretrained_model = self.get_param('get_pretrained_model')
if get_pretrained_model:
3 changes: 1 addition & 2 deletions cufacesearch/cufacesearch/searcher/searcher_lopqhbase.py
@@ -47,8 +47,7 @@ def __init__(self, global_conf_in, prefix=default_prefix):
self.skipfailed = False
# making LOPQSearcherLMDB the default LOPQSearcher
self.lopq_searcher = "LOPQSearcherLMDB"
super(SearcherLOPQHBase, self).__init__(global_conf_in, prefix)
self.set_pp(pp="SearcherLOPQHBase")
super(SearcherLOPQHBase, self).__init__(global_conf_in, prefix=prefix, pp="SearcherLOPQHBase")

# To load pickled codes files from s3 bucket
print("[{}.load_codes: log] Starting to load codes".format(self.pp))
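The two Python changes above are a single refactor: the pretty-print name `pp` used as a log prefix is now threaded through the constructor instead of being patched in with `set_pp()` after `super().__init__()` returns, so log lines emitted during base-class initialization already carry the subclass name. Below is a minimal sketch of the pattern with a simplified, hypothetical `ConfReader`; only the `pp` threading mirrors the actual diff.

```python
class ConfReader(object):
  # Hypothetical stand-in for the real base class; everything here is
  # simplified for illustration.
  def __init__(self, global_conf_in, prefix="", pp=None):
    self.global_conf_in = global_conf_in
    self.prefix = prefix
    self.set_pp(pp=pp)
    # Anything logged from here on already shows the right subclass name.
    print("[{}: log] Initialized".format(self.pp))

  def set_pp(self, pp=None):
    self.pp = pp if pp is not None else self.__class__.__name__


class GenericSearcher(ConfReader):
  def __init__(self, global_conf_in, prefix="", pp="GenericSearcher"):
    super(GenericSearcher, self).__init__(global_conf_in, prefix, pp=pp)


class SearcherLOPQHBase(GenericSearcher):
  def __init__(self, global_conf_in, prefix=""):
    # Before this commit, super().__init__ would log as "GenericSearcher"
    # and set_pp(pp="SearcherLOPQHBase") only renamed it afterwards.
    super(SearcherLOPQHBase, self).__init__(global_conf_in, prefix=prefix,
                                            pp="SearcherLOPQHBase")


if __name__ == "__main__":
  s = SearcherLOPQHBase({})  # prints "[SearcherLOPQHBase: log] Initialized"
```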
9 changes: 9 additions & 0 deletions setup/all-in-one/README.md
@@ -65,3 +65,12 @@
e.g. [https://localhost/cufacesearch/status](https://localhost/cufacesearch/status)
Details about the API are provided in the [README.md](../../www/README.md) file of the `www` folder.
You can also open your browser at [https://localhost/[endpoint]/view_similar_byURL?data=[an_image_URL]](https://localhost/[endpoint]/view_similar_byURL?data=[an_image_URL]) to visualize some results.

## Cleaning up

Once you are done running things, you can clean up the docker
containers and volumes with the following commands:

```
docker-compose -f docker-compose_(kafka/kinesis)[_monitor].yml down
docker volume prune
```
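For instance, with the plain Kafka compose file changed later in this commit, the concrete invocation would be:

```
docker-compose -f docker-compose_kafka.yml down
docker volume prune
```

Note that `docker volume prune` asks for confirmation and removes all dangling volumes on the host, not only the ones created by this project.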
36 changes: 18 additions & 18 deletions setup/all-in-one/docker-compose_kafka.yml
@@ -82,15 +82,15 @@ services:
environment:
# General environment variables
- input_type
- input_path
- input_nb_threads
- "image_pushing_type=${image_ingestion_type}"
- verbose
- input_path
- source_zip
- "nb_workers=${input_nb_workers}"
- "conf_name=${input_conf_name}"
# Kafka related environment variables
- input_topic
- input_consumer_group
- input_obj_stored_prefix
- images_topic
- kafka_servers
- kafka_security
@@ -121,18 +121,19 @@
environment:
# General environment variables
- input_type
- image_ingestion_type
- update_ingestion_type
- verbose
- "conf_name=${extr_conf_name}"
- extr_type
- extr_nb_threads
- extr_check_max_delay
# Kafka related environment variables
- images_topic
- updates_topic
- kafka_servers
- kafka_security
- extr_check_consumer_group
- extr_proc_consumer_group
- updates_topic
- images_consumer_group
# Hbase related environment variables
- hbase_host
- table_sha1infos
@@ -175,7 +176,6 @@
- input_type
- verbose
- "conf_name=${search_conf_name}"
#- "extr_type=${extr_type}"
- extr_type
- storer
# Hbase related environment variables
@@ -184,23 +184,23 @@
- table_updateinfos
- batch_update_size
- column_list_sha1s
- extr_family_column
- extr_column_family
- image_info_column_family
- image_buffer_column_family
- image_buffer_column_name
- update_info_column_family
# Search related environment variables
- "model_type=${model_type}"
- "nb_train=${nb_train}"
- "nb_min_train=${nb_min_train}"
- "lopq_V=${lopq_V}"
- "lopq_M=${lopq_M}"
- "lopq_subq=${lopq_subq}"
- "reranking=${reranking}"
- model_type
- nb_train
- nb_min_train
- lopq_V
- lopq_M
- lopq_subq
- reranking
# If model_type is lopq_pca:
- "nb_train_pca=${nb_train_pca}"
- "nb_min_train_pca=${nb_min_train_pca}"
- "lopq_pcadims=${lopq_pcadims}"
- nb_train_pca
- nb_min_train_pca
- lopq_pcadims
# need to add a volume that store the search index data
volumes:
- $repo_path:$indocker_repo_path
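Most of the churn in this compose file (and in the monitor variant below) replaces explicit interpolations such as `- "model_type=${model_type}"` with bare names such as `- model_type`. The two forms are equivalent in docker-compose whenever the container-side name matches the host-side name: a bare entry under `environment:` passes the value from the invoking shell (or the `.env` file) straight through. A hypothetical minimal compose file, not part of the repo, illustrating the equivalence:

```yaml
version: "3"
services:
  demo:
    image: busybox
    command: env  # prints the container environment
    environment:
      # Explicit interpolation (pre-commit style):
      # - "model_type=${model_type}"
      # Bare pass-through (post-commit style), same result:
      - model_type
```

Running `model_type=lopq_pca docker-compose -f demo.yml run demo` would show `model_type=lopq_pca` inside the container with either spelling.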
34 changes: 16 additions & 18 deletions setup/all-in-one/docker-compose_kafka_monitor.yml
@@ -153,7 +153,6 @@ services:
- images_topic
- kafka_servers
- kafka_security
# Kinesis related environment variables
cap_add:
- IPC_LOCK
# nothing really needs to be persistent here. no other volumes needed
@@ -185,17 +184,15 @@
- update_ingestion_type
- verbose
- "conf_name=${extr_conf_name}"
- "extr_type=${extr_type}"
- "extr_nb_threads=${extr_nb_threads}"
- "extr_check_max_delay=${extr_check_max_delay}"
- extr_type
- extr_nb_threads
- extr_check_max_delay
# Kafka related environment variables
- images_topic
- updates_topic
- kafka_servers
- kafka_security
- images_consumer_group
#- "extr_check_consumer_group=${extr_check_consumer_group}"
#- "extr_proc_consumer_group=${extr_proc_consumer_group}"
# Hbase related environment variables
- hbase_host
- table_sha1infos
@@ -238,8 +235,8 @@
- input_type
- verbose
- "conf_name=${search_conf_name}"
- "extr_type=${extr_type}"
- "storer=${storer}"
- extr_type
- storer
# Hbase related environment variables
- hbase_host
- table_sha1infos
@@ -252,17 +249,17 @@
- image_buffer_column_name
- update_info_column_family
# Search related environment variables
- "model_type=${model_type}"
- "nb_train=${nb_train}"
- "nb_min_train=${nb_min_train}"
- "lopq_V=${lopq_V}"
- "lopq_M=${lopq_M}"
- "lopq_subq=${lopq_subq}"
- "reranking=${reranking}"
- model_type
- nb_train
- nb_min_train
- lopq_V
- lopq_M
- lopq_subq
- reranking
# If model_type is lopq_pca:
- "nb_train_pca=${nb_train_pca}"
- "nb_min_train_pca=${nb_min_train_pca}"
- "lopq_pcadims=${lopq_pcadims}"
- nb_train_pca
- nb_min_train_pca
- lopq_pcadims
# need to add a volume that store the search index data
volumes:
- $repo_path:$indocker_repo_path
@@ -283,6 +280,7 @@ services:
cu_imgsearch_net:

networks:
## Something like that should be used when adding image search to an already existing docker-compose
# external:
# name: cu_imgsearch_net
cu_imgsearch_net:
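The commented-out `external` hint kept at the bottom of both compose files refers to docker-compose's external networks: instead of creating `cu_imgsearch_net` itself, a deployment can attach these services to a network that already exists, e.g. one created by another compose project. A sketch of what uncommenting would look like in the compose 2.x/3.x syntax of this era (it assumes the network was created beforehand, for instance with `docker network create cu_imgsearch_net`):

```yaml
networks:
  cu_imgsearch_net:
    external:
      name: cu_imgsearch_net
```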
