http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/database/src/create_product_view.sql
----------------------------------------------------------------------
diff --git a/src/main/database/src/create_product_view.sql 
b/src/main/database/src/create_product_view.sql
new file mode 100644
index 0000000..471e9c3
--- /dev/null
+++ b/src/main/database/src/create_product_view.sql
@@ -0,0 +1,635 @@
+--*********************************************************************************************
+--**  Product Model 
+--**
+--**  The product model is comprised of the following data models:
+--**
+--**     Product and Granule Model
+--**        - product_granule_view
+--**            - granule_imagery
+--**            - granule
+--**        - product_operation_view
+--**            - product
+--**            - product_operation
+--**
+--**     Product Archive Model
+--**        - product_meta_history_view
+--**           - product
+--**           - product_meta_history
+--**        - product_archive_view
+--**            - product_archive
+--**            - archive_view
+--**                - product_archive
+--**                - product_archive_reference
+--**        - product_reference_view
+--**           - product
+--**           - product_reference
+--**        - product_data_day_view
+--**           - product
+--**           - product_data_day
+--**
+--**     Product Contact Model
+--**        - product_contact_view
+--**            - product_contact
+--**            - contact_provider_view (see create_imagery_provider.sql)
+--**                 - contact 
+--**                 - provider
+--**                 - provider_resource_view
+--**                     - provider
+--**                     - provider_resource
+--**
+--**     Product Elements Model
+--**        - product_element_view
+--**            - product_element
+--**            - product_element_dd_view
+--**                - product_element
+--**                - element_dd
+--**        - product_datetime_view
+--**            - product
+--**            - product_datetime
+--**        - product_character_view
+--**            - product
+--**            - product_character
+--**        - product_integer_view
+--**            - product
+--**            - product_integer
+--**        - product_real_view
+--**            - product
+--**            - product_real
+--*********************************************************************************************
+
+
+--*********************************************************************************************
+-- Product and Granule Model
+--*********************************************************************************************
+
+--------------------------------------------------
+-- product_granule_view
+--------------------------------------------------
-- product_granule_view: one row per product, with the id / version / dataset /
-- endpoint / remote-UR values of all granules backing that product collapsed
-- into comma-separated lists, ordered by granule id.
DROP VIEW IF EXISTS product_granule_view CASCADE;
CREATE VIEW product_granule_view AS
SELECT
   gi.product_id AS product_id,
   string_agg(g.id::int8::text,         ',' ORDER BY g.id) AS product_granule_id_list,
   string_agg(g.version::int8::text,    ',' ORDER BY g.id) AS product_granule_version_list,
   string_agg(g.dataset_id::int8::text, ',' ORDER BY g.id) AS product_granule_dataset_id_list,
   string_agg(g.metadata_endpoint,      ',' ORDER BY g.id) AS product_granule_metadata_endpoint_list,
   string_agg(g.remote_granule_ur,      ',' ORDER BY g.id) AS product_granule_remote_granule_ur_list
FROM granule_imagery gi
LEFT JOIN granule g ON g.id = gi.granule_id
GROUP BY gi.product_id;

-- Smoke checks: row count plus a small sample.
SELECT COUNT(*) AS product_granule_view_count FROM product_granule_view;
SELECT * FROM product_granule_view LIMIT 5;
+
+---------------------------------------------------------------------------
+-- product_operation_view
+---------------------------------------------------------------------------
-- product_operation_view: one row per product with all of its operation rows
-- (agent, command, timing, ...) collapsed into comma-separated lists.
DROP VIEW IF EXISTS product_operation_view CASCADE;
CREATE VIEW product_operation_view AS
SELECT
   p.id AS product_id,
   string_agg(po.version::int8::text,    ',' ORDER BY po.id) AS product_operation_version_list,
   string_agg(po.agent,                  ',' ORDER BY po.id) AS product_operation_agent_list,
   string_agg(po.operation,              ',' ORDER BY po.id) AS product_operation_list,
   string_agg(po.command,                ',' ORDER BY po.id) AS product_operation_command_list,
   string_agg(po.arguments,              ',' ORDER BY po.id) AS product_operation_arguments_list,
   string_agg(po.start_time::int8::text, ',' ORDER BY po.id) AS product_operation_start_time_list,
   string_agg(po.stop_time::int8::text,  ',' ORDER BY po.id) AS product_operation_stop_time_list,
   -- Epoch milliseconds rendered as a readable timestamp: the value in
   -- seconds, cast through text, is interpreted by Postgres as an interval of
   -- seconds added to the epoch.
   string_agg(('1970-01-01 00:00:00 GMT'::timestamp
               + ((po.start_time/1000)::text)::interval)::timestamp::text,
              ',' ORDER BY po.id) AS product_operation_start_time_string_list,
   string_agg(('1970-01-01 00:00:00 GMT'::timestamp
               + ((po.stop_time/1000)::text)::interval)::timestamp::text,
              ',' ORDER BY po.id) AS product_operation_stop_time_string_list
FROM product p
LEFT JOIN product_operation po ON po.product_id = p.id
GROUP BY p.id;

-- Smoke checks.
SELECT COUNT(*) AS product_operation_view_count FROM product_operation_view;
SELECT * FROM product_operation_view LIMIT 5;
+
+--*********************************************************************************************
+-- Product Archive Model 
+--*********************************************************************************************
+
+--------------------------------------------------
+-- product_meta_history_view
+--------------------------------------------------
-- product_meta_history_view: product joined to its metadata-history rows,
-- with the epoch-millisecond dates also exposed as readable timestamps.
DROP VIEW IF EXISTS product_meta_history_view CASCADE;
CREATE VIEW product_meta_history_view AS
SELECT
   p.id AS product_id,
   pmh.version            AS product_meta_history_version,
   pmh.version_id         AS product_meta_history_version_id,
   pmh.revision_history   AS product_meta_history_revision_history,
   pmh.last_revision_date AS product_meta_history_last_revision_date,
   pmh.creation_date      AS product_meta_history_creation_date,
   ('1970-01-01 00:00:00 GMT'::timestamp + ((pmh.last_revision_date/1000)::text)::interval)
      AS product_meta_history_last_revision_date_string,
   ('1970-01-01 00:00:00 GMT'::timestamp + ((pmh.creation_date/1000)::text)::interval)
      AS product_meta_history_creation_date_string
FROM product p
LEFT JOIN product_meta_history pmh ON pmh.product_id = p.id
-- No aggregates are used: grouping over every selected column simply acts as
-- a DISTINCT, collapsing identical history rows.
GROUP BY p.id,
         pmh.version,
         pmh.version_id,
         pmh.revision_history,
         pmh.last_revision_date,
         pmh.creation_date;

-- Smoke checks.
SELECT COUNT(*) AS product_meta_history_view_count FROM product_meta_history_view;
SELECT * FROM product_meta_history_view LIMIT 5;
+
+--------------------------------------------------
+-- product_archive_view
+--------------------------------------------------
-- archive_view: product_archive rows with their reference rows collapsed into
-- ';'-separated lists (';' here because product_archive_view later joins
-- these lists with ',').
DROP VIEW IF EXISTS archive_view CASCADE;
CREATE VIEW archive_view AS
SELECT
   pa.id,
   pa.product_id    AS product_id,
   pa.version       AS version,
   pa.name          AS name,
   pa.type          AS type,
   pa.file_size     AS file_size,
   pa.checksum      AS checksum,
   pa.compress_flag AS compress_flag,
   pa.status        AS status,
   string_agg(par.description, ';' ORDER BY par.id) AS reference_descriptions,
   string_agg(par.name,        ';' ORDER BY par.id) AS reference_names,
   string_agg(par.type,        ';' ORDER BY par.id) AS reference_types,
   string_agg(par.status,      ';' ORDER BY par.id) AS reference_status
FROM product_archive pa
LEFT JOIN product_archive_reference par ON par.product_archive_id = pa.id
-- Grouping by the primary key lets the other product_archive columns ride
-- along via functional dependency (PostgreSQL allows this).
GROUP BY pa.id;

-- Smoke checks.
SELECT COUNT(*) AS archive_view_count FROM archive_view;
SELECT * FROM archive_view LIMIT 5;
+
-- product_archive_view: one row per product with all of its archive rows
-- (already reference-flattened by archive_view) joined into ','-lists.
DROP VIEW IF EXISTS product_archive_view CASCADE;
CREATE VIEW product_archive_view AS
SELECT
   p.id AS product_id,
   string_agg(av.name,                         ',' ORDER BY av.id) AS product_archive_name_list,
   string_agg(av.type,                         ',' ORDER BY av.id) AS product_archive_type_list,
   string_agg(av.version::int8::text,          ',' ORDER BY av.id) AS product_archive_version_list,
   string_agg(av.file_size::int8::text,        ',' ORDER BY av.id) AS product_archive_file_size_list,
   string_agg(av.checksum,                     ',' ORDER BY av.id) AS product_archive_checksum_list,
   string_agg(av.compress_flag::boolean::text, ',' ORDER BY av.id) AS product_archive_compress_flag_list,
   string_agg(av.status,                       ',' ORDER BY av.id) AS product_archive_status_list,
   string_agg(av.reference_descriptions,       ',' ORDER BY av.id) AS product_archive_reference_description_list,
   string_agg(av.reference_names,              ',' ORDER BY av.id) AS product_archive_reference_name_list,
   string_agg(av.reference_types,              ',' ORDER BY av.id) AS product_archive_reference_type_list,
   string_agg(av.reference_status,             ',' ORDER BY av.id) AS product_archive_reference_status_list
FROM product p
LEFT JOIN archive_view av ON av.product_id = p.id
GROUP BY p.id;

-- Smoke checks.
SELECT COUNT(*) AS product_archive_view_count FROM product_archive_view;
SELECT * FROM product_archive_view LIMIT 5;
+
+---------------------------------------------------------------------------
+-- product_reference_view
+---------------------------------------------------------------------------
-- product_reference_view: one row per product with its reference rows
-- collapsed into comma-separated lists, ordered by reference id.
--
-- Fix: DROP VIEW now uses CASCADE, consistent with every sibling view in this
-- file; without it the drop fails on re-run once product_view (which selects
-- from this view) has been created.
DROP VIEW IF EXISTS product_reference_view CASCADE;
CREATE VIEW product_reference_view AS
SELECT
   p.id AS product_id,
   string_agg(pr.version::int8::text, ',' ORDER BY pr.id) AS product_reference_version_list,
   string_agg(pr.type,                ',' ORDER BY pr.id) AS product_reference_type_list,
   string_agg(pr.name,                ',' ORDER BY pr.id) AS product_reference_name_list,
   string_agg(pr.path,                ',' ORDER BY pr.id) AS product_reference_path_list,
   string_agg(pr.description,         ',' ORDER BY pr.id) AS product_reference_description_list,
   string_agg(pr.status,              ',' ORDER BY pr.id) AS product_reference_status_list
FROM product p
LEFT JOIN product_reference pr ON pr.product_id = p.id
GROUP BY p.id;

-- Smoke checks.
SELECT COUNT(*) AS product_reference_view_count FROM product_reference_view;
SELECT * FROM product_reference_view LIMIT 5;
+
+--------------------------------------------------
+-- product_data_day_view
+--------------------------------------------------
-- product_data_day_view: one row per product with its data-day rows collapsed
-- into comma-separated lists (data_day is epoch milliseconds; a readable
-- rendering is provided alongside).
DROP VIEW IF EXISTS product_data_day_view CASCADE;
CREATE VIEW product_data_day_view AS
SELECT
   p.id AS product_id,
   string_agg(pdd.version::int8::text,  ',' ORDER BY pdd.id) AS product_data_day_version_list,
   string_agg(pdd.data_day::int8::text, ',' ORDER BY pdd.id) AS product_data_day_list,
   string_agg(('1970-01-01 00:00:00 GMT'::timestamp
               + ((pdd.data_day/1000)::text)::interval)::timestamp::text,
              ',' ORDER BY pdd.id) AS product_data_day_string_list
FROM product p
LEFT JOIN product_data_day pdd ON pdd.product_id = p.id
GROUP BY p.id;

-- Smoke checks.
SELECT COUNT(*) AS product_data_day_view_count FROM product_data_day_view;
SELECT * FROM product_data_day_view LIMIT 5;
+
+--*********************************************************************************************
+-- Product Contact Model 
+--*********************************************************************************************
+
+--------------------------------------------------
+-- product_contact_view
+--------------------------------------------------
-- product_contact_view: one row per product with every associated contact
-- (and that contact's provider / provider-resource details, pre-flattened by
-- contact_provider_view — see create_imagery_provider.sql) collapsed into
-- comma-separated lists, ordered by contact id.
DROP VIEW IF EXISTS product_contact_view CASCADE;
CREATE VIEW product_contact_view AS
SELECT
   pc.product_id AS product_id,
   string_agg(cpv.contact_version::int8::text,             ',' ORDER BY cpv.contact_id) AS product_contact_version_list,
   string_agg(cpv.contact_role,                            ',' ORDER BY cpv.contact_id) AS product_contact_role_list,
   string_agg(cpv.contact_first_name,                      ',' ORDER BY cpv.contact_id) AS product_contact_first_name_list,
   string_agg(cpv.contact_last_name,                       ',' ORDER BY cpv.contact_id) AS product_contact_last_name_list,
   string_agg(cpv.contact_middle_name,                     ',' ORDER BY cpv.contact_id) AS product_contact_middle_name_list,
   string_agg(cpv.contact_address,                         ',' ORDER BY cpv.contact_id) AS product_contact_address_list,
   string_agg(cpv.contact_notify_type,                     ',' ORDER BY cpv.contact_id) AS product_contact_notify_type_list,
   string_agg(cpv.contact_email,                           ',' ORDER BY cpv.contact_id) AS product_contact_email_list,
   string_agg(cpv.contact_phone,                           ',' ORDER BY cpv.contact_id) AS product_contact_phone_list,
   string_agg(cpv.contact_fax,                             ',' ORDER BY cpv.contact_id) AS product_contact_fax_list,
   string_agg(cpv.provider_long_name,                      ',' ORDER BY cpv.contact_id) AS product_contact_provider_long_name_list,
   string_agg(cpv.provider_short_name,                     ',' ORDER BY cpv.contact_id) AS product_contact_provider_short_name_list,
   string_agg(cpv.provider_type,                           ',' ORDER BY cpv.contact_id) AS product_contact_provider_type_list,
   string_agg(cpv.provider_resource_description_list,      ',' ORDER BY cpv.contact_id) AS product_contact_provider_resource_descriptions_list,
   string_agg(cpv.provider_resource_name_list,             ',' ORDER BY cpv.contact_id) AS product_contact_provider_resource_names_list,
   string_agg(cpv.provider_resource_path_list,             ',' ORDER BY cpv.contact_id) AS product_contact_provider_resource_paths_list,
   string_agg(cpv.provider_resource_type_list,             ',' ORDER BY cpv.contact_id) AS product_contact_provider_resource_types_list
FROM product_contact pc
LEFT JOIN contact_provider_view cpv ON cpv.contact_id = pc.contact_id
GROUP BY pc.product_id;

-- Smoke checks.
SELECT COUNT(*) AS product_contact_view_count FROM product_contact_view;
SELECT * FROM product_contact_view LIMIT 5;
+
+--*********************************************************************************************
+-- Product Elements Model
+--*********************************************************************************************
+
+--------------------------------------------------
+-- product_element_view
+--------------------------------------------------
-- product_element_dd_view: product_element rows with their data-dictionary
-- entries collapsed into ';'-separated lists (';' because the enclosing
-- product_element_view joins these lists with ',').
DROP VIEW IF EXISTS product_element_dd_view CASCADE;
CREATE VIEW product_element_dd_view AS
SELECT
   pe.id,
   pe.product_id,
   pe.version         AS product_element_version,
   pe.obligation_flag AS product_element_obligation_flag,
   pe.scope           AS product_element_scope,
   string_agg(dd.version::int8::text,    ';' ORDER BY dd.id) AS product_element_dd_versions,
   string_agg(dd.type,                   ';' ORDER BY dd.id) AS product_element_dd_types,
   string_agg(dd.description,            ';' ORDER BY dd.id) AS product_element_dd_descriptions,
   string_agg(dd.scope,                  ';' ORDER BY dd.id) AS product_element_dd_scopes,
   string_agg(dd.long_name,              ';' ORDER BY dd.id) AS product_element_dd_long_names,
   string_agg(dd.short_name,             ';' ORDER BY dd.id) AS product_element_dd_short_names,
   string_agg(dd.max_length::int8::text, ';' ORDER BY dd.id) AS product_element_dd_max_lengths
FROM product_element pe
LEFT JOIN element_dd dd ON pe.element_id = dd.id
GROUP BY pe.id;

-- Smoke checks.
SELECT COUNT(*) AS product_element_dd_view_count FROM product_element_dd_view;
SELECT * FROM product_element_dd_view LIMIT 5;
+
-- product_element_view: one row per product, joining the per-element lists
-- built by product_element_dd_view into ','-separated lists per product.
DROP VIEW IF EXISTS product_element_view CASCADE;
CREATE VIEW product_element_view AS
SELECT
   p.id AS product_id,
   string_agg(pedv.product_element_version::int8::text,
              ',' ORDER BY pedv.id) AS product_element_version_list,
   string_agg(pedv.product_element_obligation_flag::boolean::text,
              ',' ORDER BY pedv.id) AS product_element_obligation_flag_list,
   string_agg(pedv.product_element_scope,
              ',' ORDER BY pedv.id) AS product_element_scope_list,
   string_agg(pedv.product_element_dd_versions,
              ',' ORDER BY pedv.id) AS product_element_dd_version_list,
   string_agg(pedv.product_element_dd_types,
              ',' ORDER BY pedv.id) AS product_element_dd_type_list,
   string_agg(pedv.product_element_dd_descriptions,
              ',' ORDER BY pedv.id) AS product_element_dd_description_list,
   string_agg(pedv.product_element_dd_scopes,
              ',' ORDER BY pedv.id) AS product_element_dd_scope_list,
   string_agg(pedv.product_element_dd_long_names,
              ',' ORDER BY pedv.id) AS product_element_dd_long_name_list,
   string_agg(pedv.product_element_dd_short_names,
              ',' ORDER BY pedv.id) AS product_element_dd_short_name_list,
   string_agg(pedv.product_element_dd_max_lengths,
              ',' ORDER BY pedv.id) AS product_element_dd_max_length_list
FROM product p
LEFT JOIN product_element_dd_view pedv ON pedv.product_id = p.id
GROUP BY p.id;

-- Smoke checks.
SELECT COUNT(*) AS product_element_view_count FROM product_element_view;
SELECT * FROM product_element_view LIMIT 5;
+
+--------------------------------------------------
+-- product_datetime_view
+--------------------------------------------------
+
-- product_datetime_view: one row per product with its datetime-element rows
-- (epoch milliseconds in value_long) collapsed into comma-separated lists.
DROP VIEW IF EXISTS product_datetime_view CASCADE;
CREATE VIEW product_datetime_view AS
SELECT
   p.id AS product_id,
   string_agg(pd.version::int8::text,    ',' ORDER BY pd.id) AS product_datetime_version_list,
   string_agg(pd.value_long::int8::text, ',' ORDER BY pd.id) AS product_datetime_value_list,
   string_agg(('1970-01-01 00:00:00 GMT'::timestamp
               + ((pd.value_long/1000)::text)::interval)::timestamp::text,
              ',' ORDER BY pd.id) AS product_datetime_value_string_list
FROM product p
LEFT JOIN product_datetime pd ON pd.product_id = p.id
GROUP BY p.id;

-- Smoke checks.
SELECT COUNT(*) AS product_datetime_view_count FROM product_datetime_view;
SELECT * FROM product_datetime_view LIMIT 5;
+
+--------------------------------------------------
+-- product_character_view
+--------------------------------------------------
-- product_character_view: one row per product with its character-element rows
-- collapsed into comma-separated lists.
DROP VIEW IF EXISTS product_character_view CASCADE;
CREATE VIEW product_character_view AS
SELECT
   p.id AS product_id,
   string_agg(pch.version::int8::text, ',' ORDER BY pch.id) AS product_character_version_list,
   string_agg(pch.value,               ',' ORDER BY pch.id) AS product_character_value_list
FROM product p
LEFT JOIN product_character pch ON pch.product_id = p.id
GROUP BY p.id;

-- Smoke checks.
SELECT COUNT(*) AS product_character_view_count FROM product_character_view;
SELECT * FROM product_character_view LIMIT 5;
+
+--------------------------------------------------
+-- product_integer_view
+--------------------------------------------------
-- product_integer_view: one row per product with its integer-element rows
-- (value plus units) collapsed into comma-separated lists.
DROP VIEW IF EXISTS product_integer_view CASCADE;
CREATE VIEW product_integer_view AS
SELECT
   p.id AS product_id,
   string_agg(pi.version::int8::text, ',' ORDER BY pi.id) AS product_integer_version_list,
   string_agg(pi.units,               ',' ORDER BY pi.id) AS product_integer_units_list,
   string_agg(pi.value::int::text,    ',' ORDER BY pi.id) AS product_integer_value_list
FROM product p
LEFT JOIN product_integer pi ON pi.product_id = p.id
GROUP BY p.id;

-- Smoke checks.
SELECT COUNT(*) AS product_integer_view_count FROM product_integer_view;
SELECT * FROM product_integer_view LIMIT 5;
+
+--------------------------------------------------
+-- product_real_view
+--------------------------------------------------
-- product_real_view: one row per product with its real-element rows
-- (value plus units) collapsed into comma-separated lists.
DROP VIEW IF EXISTS product_real_view CASCADE;
CREATE VIEW product_real_view AS
SELECT
   p.id AS product_id,
   string_agg(pr.version::int8::text,  ',' ORDER BY pr.id) AS product_real_version_list,
   string_agg(pr.units,                ',' ORDER BY pr.id) AS product_real_units_list,
   string_agg(pr.value::numeric::text, ',' ORDER BY pr.id) AS product_real_value_list
FROM product p
LEFT JOIN product_real pr ON pr.product_id = p.id
GROUP BY p.id;

-- Smoke checks.
SELECT COUNT(*) AS product_real_view_count FROM product_real_view;
SELECT * FROM product_real_view LIMIT 5;
+
+
+--*********************************************************************************************
+-- Product
+--*********************************************************************************************
+
+-----------------------------------------------------------------------------------------------
+-- product_view
+-----------------------------------------------------------------------------------------------
-- product_view: the denormalized "everything about a product" view — one row
-- per product, joining the product table to every aggregate sub-view above.
--
-- Fix: the original used implicit comma joins, which behave as INNER joins.
-- Most sub-views are built FROM product and therefore contain every product,
-- but product_granule_view and product_contact_view are keyed off
-- granule_imagery / product_contact, so products with no granules or no
-- contacts were silently dropped from product_view.  Explicit LEFT JOINs keep
-- every product and yield NULL list columns where no detail rows exist.
DROP VIEW IF EXISTS product_view CASCADE;
CREATE VIEW product_view AS
SELECT
   -- product
   p.id,
   p.id           AS product_id,
   p.pt_id        AS product_pt_id,
   p.partial_id   AS product_partial_id,
   p.version      AS product_version,
   p.revision     AS product_revision,
   p.name         AS product_name,
   p.rel_path     AS product_rel_path,
   p.root_path    AS product_root_path,
   p.status       AS product_status,
   p.start_time   AS product_start_time,
   p.stop_time    AS product_stop_time,
   p.create_time  AS product_create_time,
   p.archive_time AS product_archive_time,
   -- Epoch-millisecond columns rendered as readable timestamps.
   '1970-01-01 00:00:00 GMT'::timestamp + ((p.start_time/1000)::text)::interval   AS product_start_time_string,
   '1970-01-01 00:00:00 GMT'::timestamp + ((p.stop_time/1000)::text)::interval    AS product_stop_time_string,
   '1970-01-01 00:00:00 GMT'::timestamp + ((p.create_time/1000)::text)::interval  AS product_create_time_string,
   '1970-01-01 00:00:00 GMT'::timestamp + ((p.archive_time/1000)::text)::interval AS product_archive_time_string,

   -- product_granule_view
   product_granule_id_list,
   product_granule_version_list,
   product_granule_dataset_id_list,
   product_granule_metadata_endpoint_list,
   product_granule_remote_granule_ur_list,

   -- product_operation_view
   product_operation_version_list,
   product_operation_agent_list,
   product_operation_list,
   product_operation_command_list,
   product_operation_arguments_list,
   product_operation_start_time_list,
   product_operation_stop_time_list,
   product_operation_start_time_string_list,
   product_operation_stop_time_string_list,

   -- product_meta_history_view
   product_meta_history_version,
   product_meta_history_version_id,
   product_meta_history_revision_history,
   product_meta_history_last_revision_date,
   product_meta_history_creation_date,
   product_meta_history_last_revision_date_string,
   product_meta_history_creation_date_string,

   -- product_archive_view
   product_archive_name_list,
   product_archive_type_list,
   product_archive_version_list,
   product_archive_file_size_list,
   product_archive_checksum_list,
   product_archive_compress_flag_list,
   product_archive_status_list,
   product_archive_reference_description_list,
   product_archive_reference_name_list,
   product_archive_reference_type_list,
   product_archive_reference_status_list,

   -- product_reference_view
   product_reference_version_list,
   product_reference_type_list,
   product_reference_name_list,
   product_reference_path_list,
   product_reference_description_list,
   product_reference_status_list,

   -- product_data_day_view
   product_data_day_version_list,
   product_data_day_list,
   product_data_day_string_list,

   -- product_contact_view
   product_contact_role_list,
   product_contact_version_list,
   product_contact_first_name_list,
   product_contact_last_name_list,
   product_contact_middle_name_list,
   product_contact_address_list,
   product_contact_notify_type_list,
   product_contact_email_list,
   product_contact_phone_list,
   product_contact_fax_list,
   product_contact_provider_long_name_list,
   product_contact_provider_short_name_list,
   product_contact_provider_type_list,
   product_contact_provider_resource_descriptions_list,
   product_contact_provider_resource_names_list,
   product_contact_provider_resource_paths_list,
   product_contact_provider_resource_types_list,

   -- product_element_view
   product_element_version_list,
   product_element_obligation_flag_list,
   product_element_scope_list,
   product_element_dd_version_list,
   product_element_dd_type_list,
   product_element_dd_description_list,
   product_element_dd_scope_list,
   product_element_dd_long_name_list,
   product_element_dd_short_name_list,
   product_element_dd_max_length_list,

   -- product_datetime_view
   product_datetime_version_list,
   product_datetime_value_list,
   product_datetime_value_string_list,

   -- product_character_view
   product_character_version_list,
   product_character_value_list,

   -- product_integer_view
   product_integer_version_list,
   product_integer_value_list,
   product_integer_units_list,

   -- product_real_view
   product_real_version_list,
   product_real_value_list,
   product_real_units_list

FROM product p
LEFT JOIN product_granule_view      ON product_granule_view.product_id      = p.id
LEFT JOIN product_operation_view    ON product_operation_view.product_id    = p.id
LEFT JOIN product_meta_history_view ON product_meta_history_view.product_id = p.id
LEFT JOIN product_archive_view      ON product_archive_view.product_id      = p.id
LEFT JOIN product_reference_view    ON product_reference_view.product_id    = p.id
LEFT JOIN product_data_day_view     ON product_data_day_view.product_id     = p.id
LEFT JOIN product_contact_view      ON product_contact_view.product_id      = p.id
LEFT JOIN product_element_view      ON product_element_view.product_id      = p.id
LEFT JOIN product_datetime_view     ON product_datetime_view.product_id     = p.id
LEFT JOIN product_character_view    ON product_character_view.product_id    = p.id
LEFT JOIN product_integer_view      ON product_integer_view.product_id      = p.id
LEFT JOIN product_real_view         ON product_real_view.product_id         = p.id;

-- Smoke checks.
SELECT COUNT(*) AS product_view_count FROM product_view;
SELECT * FROM product_view LIMIT 5;

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/config.conf
----------------------------------------------------------------------
diff --git a/src/main/python/config.conf b/src/main/python/config.conf
new file mode 100644
index 0000000..194b517
--- /dev/null
+++ b/src/main/python/config.conf
@@ -0,0 +1,3 @@
+[server]
+port=8890
+host=localhost

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/edge-env.bash
----------------------------------------------------------------------
diff --git a/src/main/python/edge-env.bash b/src/main/python/edge-env.bash
new file mode 100644
index 0000000..8ddcd34
--- /dev/null
+++ b/src/main/python/edge-env.bash
@@ -0,0 +1,7 @@
#!/bin/bash

# Add ./libraries to PYTHONPATH: append with a ':' when PYTHONPATH is already
# set and non-empty, otherwise start a fresh value (same behavior as the
# original if/else, expressed with the :+ parameter expansion).
export PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${PWD}/libraries"

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/edge-env.csh
----------------------------------------------------------------------
diff --git a/src/main/python/edge-env.csh b/src/main/python/edge-env.csh
new file mode 100644
index 0000000..1c42c52
--- /dev/null
+++ b/src/main/python/edge-env.csh
@@ -0,0 +1,7 @@
#!/bin/csh

# Make the bundled Edge libraries importable: append ./libraries (relative
# to the current working directory) to PYTHONPATH, creating the variable
# when it does not exist yet. csh counterpart of edge-env.bash.
if $?PYTHONPATH then
    setenv PYTHONPATH ${PYTHONPATH}:${PWD}/libraries
else
    setenv PYTHONPATH ${PWD}/libraries
endif

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/libraries/edge/__init__.py
----------------------------------------------------------------------
diff --git a/src/main/python/libraries/edge/__init__.py 
b/src/main/python/libraries/edge/__init__.py
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/libraries/edge/dateutility.py
----------------------------------------------------------------------
diff --git a/src/main/python/libraries/edge/dateutility.py 
b/src/main/python/libraries/edge/dateutility.py
new file mode 100644
index 0000000..a0e519d
--- /dev/null
+++ b/src/main/python/libraries/edge/dateutility.py
@@ -0,0 +1,57 @@
+from datetime import date, datetime, timedelta
+import dateutil.parser
+import calendar
"""
Utility class for date and time conversion.
"""
class DateUtility(object):

    # RFC 822 date format with a hard-coded GMT zone designator,
    # e.g. "Thu, 01 Jan 1970 00:00:00 GMT".
    RFC_822_GMT_FORMAT = "%a, %d %b %Y %H:%M:%S GMT"

    @staticmethod
    def convertTimeLongToIso(time):
        """Convert a UTC timestamp in milliseconds to an ISO-8601 string
        with a 'Z' suffix. Returns '' when the value cannot be converted.

        Fix: also catch TypeError (e.g. time=None), which float() raises
        and the original ValueError-only handler let escape.
        """
        isoTime = ''
        try:
            isoTime = datetime.utcfromtimestamp(float(time) / 1000).isoformat() + 'Z'
        except (TypeError, ValueError):
            pass
        return isoTime

    @staticmethod
    def convertISOToUTCTimestamp(isoTime):
        """Parse an ISO-8601 date string and return a UTC timestamp in
        milliseconds, or None when parsing fails.

        Fix: replaced the bare except (which also swallowed SystemExit and
        KeyboardInterrupt) with the exceptions dateutil.parser.parse is
        documented to raise.
        """
        try:
            # Parse ISO date to a datetime object.
            dt = dateutil.parser.parse(isoTime)

            # Return timestamp in milliseconds.
            return calendar.timegm(dt.utctimetuple()) * 1000
        except (ValueError, TypeError, OverflowError):
            return None

    @staticmethod
    def pastDateRFC822(hoursAgo):
        """Return the RFC 822 (GMT) string for the instant hoursAgo hours
        before the current UTC time."""
        return (datetime.utcnow() - timedelta(hours=hoursAgo)).strftime(DateUtility.RFC_822_GMT_FORMAT)

    @staticmethod
    def convertTimeLongToRFC822(time):
        """Convert a UTC timestamp in milliseconds to an RFC 822 GMT string."""
        return DateUtility.convertTimeLong(time, DateUtility.RFC_822_GMT_FORMAT)

    @staticmethod
    def convertTimeLong(time, format):
        """Convert a UTC timestamp in milliseconds to a string in the given
        strftime format. Returns '' when the value cannot be converted.

        Fix: also catch TypeError (see convertTimeLongToIso).
        """
        strTime = ''
        try:
            strTime = datetime.utcfromtimestamp(float(time) / 1000).strftime(format)
        except (TypeError, ValueError):
            pass
        return strTime

    @staticmethod
    def convertISOTime(isoTime, format):
        """Reformat an ISO-8601 date string using the given strftime format,
        or return None when parsing fails.

        Fix: bare except narrowed (see convertISOToUTCTimestamp).
        """
        try:
            # Parse ISO date to a datetime object.
            dt = dateutil.parser.parse(isoTime)

            # Return timestamp in the specified format.
            return dt.strftime(format)
        except (ValueError, TypeError, OverflowError):
            return None

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/libraries/edge/elasticsearch/__init__.py
----------------------------------------------------------------------
diff --git a/src/main/python/libraries/edge/elasticsearch/__init__.py 
b/src/main/python/libraries/edge/elasticsearch/__init__.py
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/libraries/edge/elasticsearch/datasetwriter.py
----------------------------------------------------------------------
diff --git a/src/main/python/libraries/edge/elasticsearch/datasetwriter.py 
b/src/main/python/libraries/edge/elasticsearch/datasetwriter.py
new file mode 100644
index 0000000..34dd063
--- /dev/null
+++ b/src/main/python/libraries/edge/elasticsearch/datasetwriter.py
@@ -0,0 +1,192 @@
+from types import *
+import json
+import logging
+import urllib
+
+import requestresponder
+from edge.dateutility import DateUtility
+from edge.httputility import HttpUtility
+from edge.opensearch.responsewriter import ResponseWriter
+import re
+
class DatasetWriter(ResponseWriter):
    """
    Response writer for dataset-level searches backed by Elasticsearch.

    Parses OpenSearch-style query parameters from the request, builds an
    Elasticsearch query-string query and issues it asynchronously through
    HttpUtility. When the request path contains 'search' (an OpenSearch
    response), a facet query is issued first to learn which datasets have
    granules; the result is stored in self.datasets.
    """

    # Hard upper bound on the page size a client may request.
    MAX_ENTRIES_PER_PAGE = 400

    def __init__(self, configFilePath):
        super(DatasetWriter, self).__init__(configFilePath)
        # Identifiers of datasets that have granules; filled in by the
        # callback created in _getHasGranuleResponseCallback().
        self.datasets = []

    def get(self, requestHandler):
        """Handle an HTTP GET: gather parameters and start the search.

        All request arguments are optional, so each get_argument() call is
        wrapped in try/except (Tornado raises when an argument is missing).
        """
        super(DatasetWriter, self).get(requestHandler)

        startIndex = 0
        try:
            startIndex = requestHandler.get_argument('startIndex')
        except:
            pass
        self.searchParameters['startIndex'] = startIndex

        entriesPerPage = self._configuration.getint('solr', 'entriesPerPage')
        try:
            entriesPerPage = requestHandler.get_argument('itemsPerPage')
            # Cap entries per page (fixed comment typo: "age" -> "page").
            if (int(entriesPerPage) > DatasetWriter.MAX_ENTRIES_PER_PAGE):
                entriesPerPage = DatasetWriter.MAX_ENTRIES_PER_PAGE
        except:
            pass
        self.searchParameters['itemsPerPage'] = entriesPerPage

        try:
            if requestHandler.get_argument('pretty').lower() == 'false':
                self.pretty = False
                self.searchParameters['pretty'] = 'false'
        except:
            pass

        try:
            if requestHandler.get_argument('full').lower() == 'true':
                self.searchParameters['full'] = 'true'
        except:
            pass

        try:
            self.searchParameters['format'] = requestHandler.get_argument('format')
        except:
            pass

        # Copy each recognized search parameter into both the variables used
        # to build the query and the echoed searchParameters.
        parameters = ['startTime', 'endTime', 'keyword', 'identifier',
                      'shortName', 'instrument', 'platform', 'fileFormat',
                      'status', 'processLevel', 'sortBy', 'bbox', 'allowNone']
        for parameter in parameters:
            try:
                value = requestHandler.get_argument(parameter)
                self.variables[parameter] = value
                self.searchParameters[parameter] = value
            except:
                pass

        if 'keyword' in self.variables:
            # Strip wildcards and lower-case the keyword to match the
            # lower-cased searchable fields in the index.
            self.variables['keyword'] = self.variables['keyword'].replace('*', '')
            self.variables['keyword'] = self.variables['keyword'].lower()

        # For OpenSearch responses an extra facet query is needed first to
        # determine which datasets have granules.
        try:
            if 'search' in requestHandler.request.path:
                callback = self._getHasGranuleResponseCallback(startIndex, entriesPerPage)
                self._getHasGranuleResponse(callback)
            else:
                self._getResponse(startIndex, entriesPerPage, self.variables)
        except:
            logging.exception('Failed to get solr response.')

    def _getResponse(self, startIndex, entriesPerPage, variables):
        """Build the query string and issue the dataset search asynchronously."""
        query = self._constructSolrQuery(startIndex, entriesPerPage, variables)
        url = self._configuration.get('solr', 'datasetUrl')

        httpUtility = HttpUtility()
        httpUtility.getResponse(url + '/_search/?' + query, self._onSolrResponse)

    def _constructSolrQuery(self, startIndex, entriesPerPage, variables):
        """
        Build the URL-encoded Elasticsearch query string
        ('q=...&from=...&size=...[&sort=...][&<bbox filter>]') from the
        parsed request variables.
        """
        queries = []
        sort = None
        filterQuery = None
        for key, value in variables.iteritems():
            if key == 'startTime':
                # Datasets whose coverage ends on/after startTime.
                startTime = DateUtility.convertISOToUTCTimestamp(value)
                if startTime is not None:
                    queries.append('stop_time:[' + str(startTime) + '%20TO%20*]')
            elif key == 'endTime':
                # Datasets whose coverage starts on/before endTime.
                stopTime = DateUtility.convertISOToUTCTimestamp(value)
                if stopTime is not None:
                    queries.append('start_time:[*%20TO%20' + str(stopTime) + ']')
            elif key == 'keyword':
                queries.append(urllib.quote(value))
            elif key == 'identifier':
                queries.append('identifier:' + self._urlEncodeSolrQueryValue(value))
            elif key == 'shortName':
                queries.append('Dataset-ShortName-Full:' + self._urlEncodeSolrQueryValue(value))
            elif key == 'platform':
                queries.append('platform:' + self._urlEncodeSolrQueryValue(value))
            elif key == 'instrument':
                queries.append('instrument:' + self._urlEncodeSolrQueryValue(value))
            elif key == 'fileFormat':
                queries.append('DatasetPolicy-DataFormat-LowerCased:' + self._urlEncodeSolrQueryValue(value))
            elif key == 'status':
                queries.append('DatasetPolicy-AccessType-LowerCased:' + self._urlEncodeSolrQueryValue(value))
            elif key == 'processLevel':
                queries.append('Dataset-ProcessingLevel-LowerCased:' + value)
            elif key == 'sortBy':
                sortByMapping = {'timeDesc': 'start_time:desc', 'timeAsc': 'start_time:asc'}
                if value in sortByMapping.keys():
                    sort = sortByMapping[value]
            elif key == 'bbox':
                filterQuery = self._constructBoundingBoxQuery(value)

        # Match everything when no criteria were supplied.
        if len(queries) == 0:
            queries.append('*')

        query = 'q=' + '+AND+'.join(queries) + '&from=' + str(startIndex) + '&size=' + str(entriesPerPage)
        if sort is not None:
            query += '&sort=' + sort
        if filterQuery is not None:
            query += '&' + filterQuery
        logging.debug('solr query: ' + query)

        return query

    def _getHasGranuleResponse(self, callback):
        """POST a zero-size facet query that lists the identifiers of all
        datasets that have at least one granule; callback receives the
        raw HTTP response."""
        url = self._configuration.get('solr', 'granuleUrl')

        httpUtility = HttpUtility()
        return httpUtility.getResponse(url + '/_search', callback, '{"query" : {"match_all" : {}}, "size" : 0, "facets" : { "identifier" : { "terms" : {"field" : "identifier"}}}}')

    def _getHasGranuleResponseCallback(self, startIndex, entriesPerPage):
        """Return a callback that records which datasets have granules
        (from the facet response) and then issues the actual dataset search."""
        def onSolrHasGranuleResponse(response):
            try:
                solrJson = json.loads(response.body)
                logging.debug("Got response for dataset facet")
                facets = solrJson['facets']['identifier']['terms']
                self.datasets = [facet['term'] for facet in facets]
                self._getResponse(startIndex, entriesPerPage, self.variables)
            except:
                logging.exception('Failed to get solr response.')
        return onSolrHasGranuleResponse

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/libraries/edge/elasticsearch/granulewriter.py
----------------------------------------------------------------------
diff --git a/src/main/python/libraries/edge/elasticsearch/granulewriter.py 
b/src/main/python/libraries/edge/elasticsearch/granulewriter.py
new file mode 100644
index 0000000..d999fe1
--- /dev/null
+++ b/src/main/python/libraries/edge/elasticsearch/granulewriter.py
@@ -0,0 +1,142 @@
+from types import *
+import logging
+import urllib
+import json
+
+from edge.opensearch.responsewriter import ResponseWriter
+from edge.dateutility import DateUtility
+from edge.httputility import HttpUtility
+import re
+
class GranuleWriter(ResponseWriter):
    """
    Response writer for granule-level searches backed by Elasticsearch.

    Parses OpenSearch-style query parameters from the request, builds an
    Elasticsearch request body (query_string query filtered to
    status 'online') and POSTs it asynchronously through HttpUtility.
    """

    # Hard upper bound on the page size a client may request.
    MAX_ENTRIES_PER_PAGE = 400

    def __init__(self, configFilePath, requiredParams = None):
        super(GranuleWriter, self).__init__(configFilePath, requiredParams)
        self.startIndex = 0
        # Default page size comes from configuration.
        self.entriesPerPage = self._configuration.getint('solr', 'entriesPerPage')

    def get(self, requestHandler):
        """Handle an HTTP GET: gather parameters and start the search.

        All request arguments are optional, so each get_argument() call is
        wrapped in try/except (Tornado raises when an argument is missing).
        """
        super(GranuleWriter, self).get(requestHandler)

        try:
            self.startIndex = requestHandler.get_argument('startIndex')
        except:
            pass
        self.searchParameters['startIndex'] = self.startIndex

        try:
            self.entriesPerPage = requestHandler.get_argument('itemsPerPage')
            # Cap entries per page (fixed comment typo: "age" -> "page").
            if (int(self.entriesPerPage) > GranuleWriter.MAX_ENTRIES_PER_PAGE):
                self.entriesPerPage = GranuleWriter.MAX_ENTRIES_PER_PAGE
        except:
            pass
        self.searchParameters['itemsPerPage'] = self.entriesPerPage

        try:
            if requestHandler.get_argument('pretty').lower() == 'false':
                self.pretty = False
                self.searchParameters['pretty'] = 'false'
        except:
            pass

        try:
            if requestHandler.get_argument('full').lower() == 'true':
                self.searchParameters['full'] = 'true'
        except:
            pass

        try:
            self.searchParameters['format'] = requestHandler.get_argument('format')
        except:
            pass

        # Copy each recognized search parameter into both the variables used
        # to build the query and the echoed searchParameters.
        parameters = ['startTime', 'endTime', 'keyword', 'name', 'identifier',
                      'shortName', 'bbox', 'sortBy']
        for parameter in parameters:
            try:
                value = requestHandler.get_argument(parameter)
                self.variables[parameter] = value
                self.searchParameters[parameter] = value
            except:
                pass

        if 'keyword' in self.variables:
            # Strip wildcards and lower-case the keyword to match the
            # lower-cased searchable fields in the index.
            self.variables['keyword'] = self.variables['keyword'].replace('*', '')
            self.variables['keyword'] = self.variables['keyword'].lower()

        try:
            self._getSolrResponse(self.startIndex, self.entriesPerPage, self.variables)
        except:
            logging.exception('Failed to get solr response.')

    def _getSolrResponse(self, startIndex, entriesPerPage, variables):
        """Build the request body and POST the granule search asynchronously."""
        query = self._constructSolrQuery(startIndex, entriesPerPage, variables)
        url = self._configuration.get('solr', 'granuleUrl')

        httpUtility = HttpUtility()
        httpUtility.getResponse(url + '/_search', self._onSolrResponse, query)

    def _constructSolrQuery(self, startIndex, entriesPerPage, variables):
        """
        Build the Elasticsearch request body (a JSON string) for a granule
        search. Only granules with status 'online' are matched; results are
        sorted by start_time, descending unless sortBy=timeAsc.
        """
        # Default sort order.
        sort = 'desc'
        filterQuery = None
        queries = []
        for key, value in variables.iteritems():
            if key == 'startTime':
                # Granules whose coverage ends on/after startTime.
                startTime = DateUtility.convertISOToUTCTimestamp(value)
                if startTime is not None:
                    queries.append('stop_time:[' + str(startTime) + ' TO *]')
            elif key == 'endTime':
                # Granules whose coverage starts on/before endTime.
                stopTime = DateUtility.convertISOToUTCTimestamp(value)
                if stopTime is not None:
                    queries.append('start_time:[* TO ' + str(stopTime) + ']')
            elif key == 'keyword':
                queries.append('SearchableText-LowerCased:(' + urllib.quote(value) + ')')
            elif key == 'identifier':
                queries.append('identifier:"' + value + '"')
            elif key == 'shortName':
                queries.append('Dataset-ShortName-Full:' + self._urlEncodeSolrQueryValue(value))
            elif key == 'name':
                queries.append('name:"' + value + '"')
            elif key == 'granuleIds':
                granuleIds = [str(granuleId) for granuleId in value]
                queries.append('Granule-Id:(' + '+OR+'.join(granuleIds) + ')')
                # Explicit granule id lookups always page from the beginning.
                startIndex = 0
            elif key == 'sortBy':
                sortByMapping = {'timeAsc': 'asc'}
                if value in sortByMapping.keys():
                    sort = sortByMapping[value]
            elif key == 'bbox':
                filterQuery = self._constructBoundingBoxQuery(value)

        # Match everything when no criteria were supplied.
        if len(queries) == 0:
            queries.append('*')

        # NOTE(review): filterQuery (the bbox filter) is computed above but was
        # never added to the returned request body, so bounding-box filtering
        # is silently ignored here -- confirm intent before wiring it into the
        # Elasticsearch filter. (The original also built and logged a URL-style
        # 'q=...' string that was never sent; the actual body is logged below.)
        body = json.dumps({'query' : {'filtered' : { 'query' : {'query_string' : {'query' : ' AND '.join(queries)}}, 'filter' : {'term' : {'status' : 'online'}}}}, 'from' : startIndex, 'size' : entriesPerPage, 'sort' : [{'start_time' : {'order' : sort}}]})
        logging.debug('elasticsearch query body: ' + body)

        return body

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/libraries/edge/elasticsearch/opensearch/__init__.py
----------------------------------------------------------------------
diff --git 
a/src/main/python/libraries/edge/elasticsearch/opensearch/__init__.py 
b/src/main/python/libraries/edge/elasticsearch/opensearch/__init__.py
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/libraries/edge/elasticsearch/opensearch/atomresponsebyelasticsearch.py
----------------------------------------------------------------------
diff --git 
a/src/main/python/libraries/edge/elasticsearch/opensearch/atomresponsebyelasticsearch.py
 
b/src/main/python/libraries/edge/elasticsearch/opensearch/atomresponsebyelasticsearch.py
new file mode 100644
index 0000000..a79c9c6
--- /dev/null
+++ 
b/src/main/python/libraries/edge/elasticsearch/opensearch/atomresponsebyelasticsearch.py
@@ -0,0 +1,87 @@
+import json
+import urllib
+
+from edge.opensearch.atomresponse import AtomResponse
+from collections import defaultdict
+
class AtomResponseByElasticsearch(AtomResponse):
    """
    Atom/OpenSearch feed generator fed by a raw Elasticsearch JSON search
    response. Subclasses override _populateChannel()/_populateItem() to add
    feed- and entry-level elements.
    """

    def __init__(self):
        super(AtomResponseByElasticsearch, self).__init__()
        # Register the GIBS namespace used for provider-specific elements.
        # (Fixed: the original line carried a stray '";' artifact after the
        # URL argument, which is a syntax error.)
        self.addNamespace("gibs", "http://gibs.jpl.nasa.gov/opensearch/")

    def generate(self, response, pretty=False):
        """Populate feed variables/items from the response, then render."""
        self._populate(response)
        return super(AtomResponseByElasticsearch, self).generate(pretty)

    def _populate(self, response):
        """Fill in OpenSearch pagination elements and one item per hit.

        response is the raw Elasticsearch JSON string, or None when the
        upstream search failed (a minimal error entry is emitted instead).
        """
        self._populateChannel(response)

        if response is None:
            self.variables.append(
                {'namespace': 'opensearch', 'name': 'totalResults', 'value': 1}
            )
            self.variables.append(
                {'namespace': 'opensearch', 'name': 'startIndex', 'value': 1}
            )
            self.variables.append(
                {'namespace': 'opensearch', 'name': 'itemsPerPage', 'value': 1}
            )
            self.parameters['startIndex'] = 0
            url = self.link + '?' + urllib.urlencode(self.parameters)
            self.variables.append({'name': 'link', 'attribute': {'href': url, 'rel': 'self', 'type': 'application/atom+xml'}})
            self.variables.append({'name': 'link', 'attribute': {'href': url, 'rel': 'first', 'type': 'application/atom+xml'}})
            item = [
                {'name': 'title', 'value': 'Error'},
                {'name': 'content', 'value': 'error'}
            ]
            self.items.append(item)
        else:
            jsonResponse = json.loads(response)
            numFound = int(jsonResponse['hits']['total'])
            start = int(self.parameters['startIndex'])
            rows = int(self.parameters['itemsPerPage'])

            # Pagination links: self, first, previous (when not on the first
            # page) and next (when more results remain). Note that
            # self.parameters['startIndex'] is mutated before each urlencode.
            self.parameters['startIndex'] = start
            self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'self', 'type': 'application/atom+xml'}})
            self.parameters['startIndex'] = 0
            self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'first', 'type': 'application/atom+xml'}})
            if start > 0:
                if (start - rows > 0):
                    self.parameters['startIndex'] = start - rows
                self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'previous', 'type': 'application/atom+xml'}})
            if start + rows < numFound:
                self.parameters['startIndex'] = start + rows
                self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'next', 'type': 'application/atom+xml'}})

            self.variables.append(
                {'namespace': 'opensearch', 'name': 'totalResults', 'value': numFound}
            )
            self.variables.append(
                {'namespace': 'opensearch', 'name': 'startIndex', 'value': start}
            )
            self.variables.append(
                {'namespace': 'opensearch', 'name': 'itemsPerPage', 'value': rows}
            )

            for doc in jsonResponse['hits']['hits']:
                item = []
                self._populateItem(response, doc, item)
                self.items.append(item)

    def _populateChannel(self, response):
        # Hook for subclasses; the base implementation adds nothing.
        pass

    def _populateItem(self, response, doc, item):
        # Hook for subclasses; the base implementation adds nothing.
        pass

    def _populateItemWithAllMetadata(self, doc, item):
        """Append every field of the document under the gibs namespace.

        List-valued fields become nested child elements -- assumes each list
        element is a dict (TODO confirm against the index mapping).
        """
        for docKey in doc.keys():
            if isinstance(doc[docKey], list):
                for child in doc[docKey]:
                    childItem = []
                    for childKey in child.keys():
                        childItem.append({'namespace': 'gibs', 'name': childKey, 'value': child[childKey]})
                    item.append({'namespace': 'gibs', 'name': docKey, 'value': childItem})
            else:
                item.append({'namespace': 'gibs', 'name': docKey, 'value': doc[docKey]})

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/libraries/edge/elasticsearch/opensearch/datasetatomresponse.py
----------------------------------------------------------------------
diff --git 
a/src/main/python/libraries/edge/elasticsearch/opensearch/datasetatomresponse.py
 
b/src/main/python/libraries/edge/elasticsearch/opensearch/datasetatomresponse.py
new file mode 100644
index 0000000..a8d10d8
--- /dev/null
+++ 
b/src/main/python/libraries/edge/elasticsearch/opensearch/datasetatomresponse.py
@@ -0,0 +1,79 @@
+import logging
+import datetime
+import urllib
+
+from edge.elasticsearch.opensearch.atomresponsebyelasticsearch import 
AtomResponseByElasticsearch
+from edge.dateutility import DateUtility
+
class DatasetAtomResponse(AtomResponseByElasticsearch):
    """
    Atom entry generator for dataset-level OpenSearch results.

    Each Elasticsearch hit becomes one feed entry with title, description,
    a metadata enclosure link, an optional granule-search link (only for
    datasets known to have granules), spatial coverage and temporal extent.

    Large blocks of dead, triple-quoted-out PO.DAAC-era code from the
    original were removed; they were no-op string expressions.
    """

    def __init__(self, portalUrl, host, url, datasets):
        super(DatasetAtomResponse, self).__init__()
        self.portalUrl = portalUrl
        self.host = host
        self.url = url
        # Lower-cased identifiers of datasets that have granules; used to
        # decide whether to emit a granule search link for an entry.
        self.datasets = datasets

    def _populateChannel(self, solrResponse):
        # Advertise the granule OpenSearch description document.
        self.variables.append({'name': 'link', 'attribute': {'href': self.url+self.searchBasePath+'podaac-granule-osd.xml', 'rel': 'search', 'type': 'application/opensearchdescription+xml' }})

    def _populateItem(self, solrResponse, doc, item):
        """Append the Atom elements for one dataset hit to item."""
        source = doc['_source']
        persistentId = source['identifier']
        idTuple = ('identifier', persistentId)

        item.append({'name': 'title', 'value': source['title']})
        item.append({'name': 'content', 'value': source['description']})

        # Enclosure link to the full GIBS metadata for this dataset.
        item.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'dataset?' + urllib.urlencode(dict([idTuple, ('full', 'true')])), 'rel': 'enclosure', 'type': 'application/atom+xml', 'title': 'GIBS Metadata' }})

        # Only generate a granule search link if the dataset has granules,
        # forwarding any spatial/temporal constraints from this request.
        if (source['identifier'].lower() in self.datasets):
            supportedGranuleParams = dict([(key, value) for key, value in self.parameters.iteritems() if key in ['bbox', 'startTime', 'endTime']])
            supportedGranuleParams['identifier'] = persistentId
            item.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'granule?' + urllib.urlencode(supportedGranuleParams), 'rel': 'search', 'type': 'application/atom+xml', 'title': 'Product Search' }})

        item.append({'name': 'id', 'value': source['identifier']})

        # Spatial coverage as a GeoRSS/GML envelope, only when all four
        # corner coordinates are present.
        if source['west_longitude'] is not None and source['south_latitude'] is not None and source['east_longitude'] is not None and source['north_latitude'] is not None:
            item.append({'namespace': 'georss', 'name': 'where', 'value': {'namespace': 'gml', 'name': 'Envelope', 'value': [{'namespace': 'gml', 'name': 'lowerCorner', 'value': ' '.join([str(source['west_longitude']), str(source['south_latitude'])]) }, {'namespace': 'gml', 'name': 'upperCorner', 'value': ' '.join([str(source['east_longitude']), str(source['north_latitude'])])}]}})

        # Temporal extent; timestamps are stored as epoch milliseconds.
        if 'start_time' in source and source['start_time'] is not None:
            item.append({'namespace': 'time', 'name': 'start', 'value': DateUtility.convertTimeLongToIso(source['start_time'])})

        if 'stop_time' in source and source['stop_time'] is not None:
            item.append({'namespace': 'time', 'name': 'end', 'value': DateUtility.convertTimeLongToIso(source['stop_time'])})

        # full=true dumps every document field under the gibs namespace.
        if 'full' in self.parameters and self.parameters['full']:
            self._populateItemWithAllMetadata(source, item)

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/libraries/edge/elasticsearch/opensearch/granuleatomresponse.py
----------------------------------------------------------------------
diff --git 
a/src/main/python/libraries/edge/elasticsearch/opensearch/granuleatomresponse.py
 
b/src/main/python/libraries/edge/elasticsearch/opensearch/granuleatomresponse.py
new file mode 100644
index 0000000..a4d8cb7
--- /dev/null
+++ 
b/src/main/python/libraries/edge/elasticsearch/opensearch/granuleatomresponse.py
@@ -0,0 +1,78 @@
+import datetime
+import urllib
+
+from edge.elasticsearch.opensearch.atomresponsebyelasticsearch import 
AtomResponseByElasticsearch
+from edge.dateutility import DateUtility
+
class GranuleAtomResponse(AtomResponseByElasticsearch):
    """Atom feed response for granule-level OpenSearch queries backed by
    Elasticsearch.

    Each Elasticsearch hit (``doc``) becomes one Atom ``<entry>``; the hit's
    fields are read from ``doc['_source']``.
    """

    def __init__(self, linkToGranule, host, url):
        super(GranuleAtomResponse, self).__init__()

        # Comma-separated list of preferred granule reference types,
        # kept as a list for later lookup.
        self.linkToGranule = linkToGranule.split(',')
        self.host = host
        self.url = url

    def _populateChannel(self, solrResponse):
        """Add feed-level elements: the OpenSearch description link."""
        self.variables.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'podaac-dataset-osd.xml', 'rel': 'search', 'type': 'application/opensearchdescription+xml'}})

    def _populateItem(self, solrResponse, doc, item):
        """Append the Atom elements for a single granule entry to ``item``.

        ``doc`` is one Elasticsearch hit; ``item`` is the mutable list of
        element dicts that AtomResponse.generate() renders into an <entry>.
        """
        source = doc['_source']

        item.append({'name': 'title', 'value': source['name']})

        # 'updated' is the granule start time when available, otherwise the
        # current UTC time; the start time is also reused for <time:start>.
        updated = None
        startTime = None
        if 'start_time' in source and source['start_time'] is not None:
            updated = DateUtility.convertTimeLongToIso(source['start_time'])
            startTime = updated
        else:
            updated = datetime.datetime.utcnow().isoformat() + 'Z'

        item.append({'name': 'updated', 'value': updated})
        # Entry id is "<dataset identifier>:<granule name>".
        item.append({'name': 'id', 'value': source['identifier'] + ':' + source['name']})

        # Enclosure link back to this service for the full granule metadata.
        parameters = {'identifier': source['identifier'], 'name': source['name']}
        parameters['full'] = 'true'
        item.append({'name': 'link', 'attribute': {'href': self.url + self.searchBasePath + 'granule?' + urllib.urlencode(parameters), 'rel': 'enclosure', 'type': 'application/atom+xml', 'title': 'GIBS Metadata'}})

        item.append({'namespace': 'gibs', 'name': 'identifier', 'value': source['identifier']})

        if startTime is not None:
            item.append({'namespace': 'time', 'name': 'start', 'value': startTime})

        if 'stop_time' in source and source['stop_time'] is not None:
            item.append({'namespace': 'time', 'name': 'end', 'value': DateUtility.convertTimeLongToIso(source['stop_time'])})

        # Only emit the full metadata dump when the request asked for it.
        if 'full' in self.parameters and self.parameters['full']:
            self._populateItemWithAllMetadata(source, item)

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/libraries/edge/httputility.py
----------------------------------------------------------------------
diff --git a/src/main/python/libraries/edge/httputility.py 
b/src/main/python/libraries/edge/httputility.py
new file mode 100644
index 0000000..d3fd650
--- /dev/null
+++ b/src/main/python/libraries/edge/httputility.py
@@ -0,0 +1,13 @@
+import tornado.httpclient
+
class HttpUtility(object):
    """Thin helper around Tornado's asynchronous HTTP client."""

    def getResponse(self, url, callback, body=None, headers=None):
        """Issue an async HTTP request and invoke ``callback`` with the response.

        A request with a ``body`` is sent as POST, otherwise as GET.
        Caller-supplied ``headers`` are merged over the defaults.
        """
        # Ask the server to close the connection after each request.
        mergedHeaders = {'Connection': 'close'}
        if headers is not None:
            mergedHeaders.update(headers)

        if body is None:
            request = tornado.httpclient.HTTPRequest(
                url, method='GET', headers=mergedHeaders, request_timeout=30)
        else:
            request = tornado.httpclient.HTTPRequest(
                url, method='POST', headers=mergedHeaders, request_timeout=30,
                body=body)

        tornado.httpclient.AsyncHTTPClient().fetch(request, callback=callback)

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/libraries/edge/opensearch/__init__.py
----------------------------------------------------------------------
diff --git a/src/main/python/libraries/edge/opensearch/__init__.py 
b/src/main/python/libraries/edge/opensearch/__init__.py
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/libraries/edge/opensearch/atomresponse.py
----------------------------------------------------------------------
diff --git a/src/main/python/libraries/edge/opensearch/atomresponse.py 
b/src/main/python/libraries/edge/opensearch/atomresponse.py
new file mode 100644
index 0000000..ddf8bdb
--- /dev/null
+++ b/src/main/python/libraries/edge/opensearch/atomresponse.py
@@ -0,0 +1,145 @@
+import logging
+
+from xml.dom.minidom import Document
+import xml.sax.saxutils
+
+from edge.opensearch.response import Response
+
class AtomResponse(Response):
    """Builds an Atom (RFC 4287) XML document from accumulated state.

    Subclasses populate ``self.variables`` (feed-level element dicts) and
    ``self.items`` (one list of element dicts per ``<entry>``) plus the
    scalar fields (``title``, ``id``, ``updated``, ``authors``) before
    calling :meth:`generate`.

    Element dicts have the shape
    ``{'name': ..., ['namespace': ...,] ['value': ...,] ['attribute': {...}]}``
    where ``value`` may be a scalar, a nested element dict, or a list of
    nested element dicts (see :meth:`_createNode`).
    """

    def __init__(self):
        super(AtomResponse, self).__init__()
        # Prefix -> URI map; the '' key is the default (Atom) namespace.
        self.namespaces = {
            '': 'http://www.w3.org/2005/Atom',
            'opensearch': 'http://a9.com/-/spec/opensearch/1.1/',
            'podaac': 'http://podaac.jpl.nasa.gov/opensearch/',
            'georss': 'http://www.georss.org/georss',
            'gml': 'http://www.opengis.net/gml',
            'time': 'http://a9.com/-/opensearch/extensions/time/1.0/'
        }

        self.title = None
        self.link = None
        self.update = None
        self.authors = []
        self.variables = []
        self.items = []
        self.id = None
        self.updated = None
        self.parameters = {}

    def addNamespace(self, name, uri):
        """Register (or replace) a namespace prefix declared on <feed>."""
        self.namespaces[name] = uri

    def removeNamespace(self, name):
        """Remove a namespace prefix; raises KeyError if it is not present."""
        del self.namespaces[name]

    def generate(self, pretty=False):
        """Render the accumulated feed state to XML.

        NOTE(review): returns unicode text when ``pretty`` is true but
        encoded bytes otherwise (``toxml('utf-8')``) — callers must cope
        with both; confirm whether this asymmetry is intentional.
        """
        logging.debug('AtomResponse.generate is called.')

        document = Document()
        feed = document.createElement('feed')
        # Declare every registered namespace on the root element.
        for namespace in self.namespaces.keys():
            namespaceAttr = 'xmlns'
            if namespace != '':
                namespaceAttr += ':' + namespace
            feed.setAttribute(namespaceAttr, self.namespaces[namespace])
        document.appendChild(feed)

        # NOTE(review): minidom already escapes text nodes on serialization,
        # so the explicit saxutils.escape() below double-escapes '&', '<',
        # '>' (e.g. '&' becomes '&amp;amp;'). Preserved as-is; verify
        # downstream consumers before changing.
        title = document.createElement('title')
        feed.appendChild(title)
        title.appendChild(document.createTextNode(xml.sax.saxutils.escape(self.title)))

        updated = document.createElement('updated')
        feed.appendChild(updated)
        updated.appendChild(document.createTextNode(xml.sax.saxutils.escape(self.updated)))

        idElement = document.createElement('id')
        feed.appendChild(idElement)
        idElement.appendChild(document.createTextNode(xml.sax.saxutils.escape(self.id)))

        # A single <author> holding one <name> per author string.
        author = document.createElement('author')
        feed.appendChild(author)
        for authorName in self.authors:
            authorElement = document.createElement('name')
            author.appendChild(authorElement)
            authorElement.appendChild(document.createTextNode(xml.sax.saxutils.escape(authorName)))

        # Feed-level elements (opensearch totals, paging links, ...).
        for variable in self.variables:
            self._createNode(document, variable, feed)

        # One <entry> per item; each item is a list of element dicts.
        for item in self.items:
            itemElement = document.createElement('entry')
            feed.appendChild(itemElement)

            for itemEntry in item:
                self._createNode(document, itemEntry, itemElement)

        return document.toprettyxml() if pretty else document.toxml('utf-8')

    def _createNode(self, document, itemEntry, itemElement):
        """Recursively convert one element dict into a DOM node under
        ``itemElement``.

        ``value`` handling: a list recurses once per entry, a dict recurses
        once, any other value becomes an (escaped) text node. ``attribute``
        entries become XML attributes on the created element.
        """
        elementName = itemEntry['name']
        if 'namespace' in itemEntry:
            elementName = itemEntry['namespace'] + ':' + elementName
        variableElement = document.createElement(elementName)
        itemElement.appendChild(variableElement)
        if 'value' in itemEntry:
            value = itemEntry['value']
            if isinstance(value, list):
                for valueEntry in value:
                    self._createNode(document, valueEntry, variableElement)
            elif isinstance(value, dict):
                self._createNode(document, value, variableElement)
            else:
                variableElement.appendChild(document.createTextNode(xml.sax.saxutils.escape(str(value))))
        if 'attribute' in itemEntry:
            for attr in itemEntry['attribute'].keys():
                variableElement.setAttribute(attr, itemEntry['attribute'][attr])

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/libraries/edge/opensearch/atomresponsebysolr.py
----------------------------------------------------------------------
diff --git a/src/main/python/libraries/edge/opensearch/atomresponsebysolr.py 
b/src/main/python/libraries/edge/opensearch/atomresponsebysolr.py
new file mode 100644
index 0000000..c63fd5f
--- /dev/null
+++ b/src/main/python/libraries/edge/opensearch/atomresponsebysolr.py
@@ -0,0 +1,134 @@
+import json
+import urllib
+
+from edge.opensearch.atomresponse import AtomResponse
+from collections import defaultdict
+
class AtomResponseBySolr(AtomResponse):
    """Atom response populated from a raw Solr JSON response string.

    Handles the generic feed plumbing (totals, paging links) and delegates
    per-document work to the subclass hooks ``_populateChannel`` and
    ``_populateItem``.
    """

    def __init__(self):
        super(AtomResponseBySolr, self).__init__()

    def generate(self, solrResponse, pretty=False):
        """Populate feed state from ``solrResponse`` then render the XML."""
        self._populate(solrResponse)
        return super(AtomResponseBySolr, self).generate(pretty)

    def _populate(self, solrResponse):
        """Fill feed-level variables and per-document items.

        A ``None`` response produces a single synthetic error entry so the
        caller still receives a well-formed feed.
        """
        self._populateChannel(solrResponse)

        if solrResponse is None:
            # Error feed: one page, one synthetic 'Error' entry.
            self.variables.append(
                {'namespace': 'opensearch', 'name': 'totalResults', 'value': 1}
            )
            self.variables.append(
                {'namespace': 'opensearch', 'name': 'startIndex', 'value': 1}
            )
            self.variables.append(
                {'namespace': 'opensearch', 'name': 'itemsPerPage', 'value': 1}
            )
            self.parameters['startIndex'] = 0
            url = self.link + '?' + urllib.urlencode(self.parameters)
            self.variables.append({'name': 'link', 'attribute': {'href': url, 'rel': 'self', 'type': 'application/atom+xml'}})
            self.variables.append({'name': 'link', 'attribute': {'href': url, 'rel': 'first', 'type': 'application/atom+xml'}})
            item = [
                {'name': 'title', 'value': 'Error'},
                {'name': 'content', 'value': 'error'}
            ]
            self.items.append(item)
        else:
            solrJson = json.loads(solrResponse)
            numFound = int(solrJson['response']['numFound'])
            start = int(solrJson['response']['start'])
            rows = int(solrJson['responseHeader']['params']['rows'])

            # Paging links: self / first / previous / next, built by mutating
            # the shared startIndex parameter before each urlencode.
            self.parameters['startIndex'] = start
            self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'self', 'type': 'application/atom+xml'}})
            self.parameters['startIndex'] = 0
            self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'first', 'type': 'application/atom+xml'}})
            if start > 0:
                # Previous page starts at max(start - rows, 0); startIndex is
                # already 0 from the 'first' link when start - rows <= 0.
                if (start - rows > 0):
                    self.parameters['startIndex'] = start - rows
                self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'previous', 'type': 'application/atom+xml'}})
            if start + rows < numFound:
                self.parameters['startIndex'] = start + rows
                self.variables.append({'name': 'link', 'attribute': {'href': self.link + '?' + urllib.urlencode(self.parameters), 'rel': 'next', 'type': 'application/atom+xml'}})

            self.variables.append(
                {'namespace': 'opensearch', 'name': 'totalResults', 'value': solrJson['response']['numFound']}
            )
            self.variables.append(
                {'namespace': 'opensearch', 'name': 'startIndex', 'value': solrJson['response']['start']}
            )
            self.variables.append(
                {'namespace': 'opensearch', 'name': 'itemsPerPage', 'value': solrJson['responseHeader']['params']['rows']}
            )

            # One entry per Solr document; content comes from the subclass.
            for doc in solrJson['response']['docs']:
                item = []
                self._populateItem(solrResponse, doc, item)
                self.items.append(item)

    def _populateChannel(self, solrResponse):
        """Subclass hook: append feed-level elements. Default: nothing."""
        pass

    def _populateItem(self, solrResponse, doc, item):
        """Subclass hook: append entry elements for one Solr doc."""
        pass

    def _populateItemWithPodaacMetadata(self, doc, item, multiValuedElementsKeys):
        """Emit every Solr field of ``doc`` as podaac-namespaced elements.

        Fields whose name starts with one of ``multiValuedElementsKeys`` are
        grouped: parallel arrays sharing a prefix are zipped into one nested
        element per index. Fields ending in '-Full' or '-Long' are skipped.
        """
        ignoreElementsEndingWith = ('-Full', '-Long')
        multiValuedElements = defaultdict(list)
        for docKey in doc.keys():
            if docKey.startswith(multiValuedElementsKeys):
                # Group by the prefix before the first hyphen.
                multiValuedElements[docKey.split('-', 1)[0]].append(docKey)
            elif not docKey.endswith(ignoreElementsEndingWith):
                if len(doc[docKey]) > 1:
                    item.append({'namespace': 'podaac', 'name': self._camelCaseStripHyphen(docKey), 'value': [{'namespace': 'podaac', 'name': 'value', 'value': x} for x in doc[docKey]]})
                else:
                    item.append({'namespace': 'podaac', 'name': self._camelCaseStripHyphen(docKey), 'value': doc[docKey][0]})
        for multiValuedKey in multiValuedElements:
            # Zip the parallel arrays: one nested element per array index.
            for i, x in enumerate(doc[multiValuedElements[multiValuedKey][0]]):
                values = []
                for key in multiValuedElements[multiValuedKey]:
                    if not key.endswith(ignoreElementsEndingWith):
                        values.append({'namespace': 'podaac', 'name': self._camelCaseStripHyphen(key.split('-', 1)[1]), 'value': doc[key][i]})
                item.append({'namespace': 'podaac', 'name': self._camelCaseStripHyphen(multiValuedKey), 'value': values})

    def _camelCaseStripHyphen(self, key):
        """Turn a hyphenated Solr field name into a camelCase element name.

        Special case: the first '-Element-' and 'Contact-' fragments are
        dropped to avoid duplicated words in the resulting tag.
        """
        key = key.replace('-Element-', '', 1).replace('Contact-', '', 1)
        return key[0].lower() + key[1:].replace('-', '')

http://git-wip-us.apache.org/repos/asf/incubator-sdap-edge/blob/53351bf3/src/main/python/libraries/edge/opensearch/datacastingresponsebysolr.py
----------------------------------------------------------------------
diff --git 
a/src/main/python/libraries/edge/opensearch/datacastingresponsebysolr.py 
b/src/main/python/libraries/edge/opensearch/datacastingresponsebysolr.py
new file mode 100644
index 0000000..b560d7e
--- /dev/null
+++ b/src/main/python/libraries/edge/opensearch/datacastingresponsebysolr.py
@@ -0,0 +1,71 @@
+import json
+import logging
+
+from edge.opensearch.fgdcresponse import FgdcResponse
+from edge.dateutility import DateUtility
+
class DatacastingResponseBySolr(FgdcResponse):
    """Datacasting RSS response built from Solr dataset and granule queries.

    Expects exactly one dataset document; granule documents drive the feed's
    publication date and the per-item content rendered by the template.
    """

    def __init__(self, portalUrl, archivedWithin):
        super(DatacastingResponseBySolr, self).__init__()

        self.addNamespace("datacasting", "http://datacasting.jpl.nasa.gov/datacasting")
        self.addNamespace("georss", "http://www.georss.org/georss")
        self.addNamespace("gml", "http://www.opengis.net/gml")

        self.portalUrl = portalUrl
        # Window (hours) used for the default publication date when no
        # granule supplies an archive time.
        self.archivedWithin = archivedWithin

    def generate(self, solrDatasetResponse, solrGranuleResponse=None, pretty=False):
        """Populate template variables from the Solr responses, then render."""
        self._populate(solrDatasetResponse, solrGranuleResponse)
        return super(DatacastingResponseBySolr, self).generate(pretty)

    def _populate(self, solrDatasetResponse, solrGranuleResponse=None):
        """Fill ``self.variables`` from the dataset and granule JSON.

        Raises Exception when the dataset response does not contain exactly
        one document, or when no granule response was supplied.
        """
        if solrDatasetResponse is not None:
            solrJson = json.loads(solrDatasetResponse)

            logging.debug('dataset count: ' + str(len(solrJson['response']['docs'])))

            if len(solrJson['response']['docs']) == 1:
                doc = solrJson['response']['docs'][0]

                self.variables['doc'] = doc

                # Release year is best-effort: the citation date may be
                # missing or malformed, in which case it is simply omitted.
                try:
                    self.variables['DatasetCitation_ReleaseYear'] = DateUtility.convertTimeLong(doc['DatasetCitation-ReleaseDateLong'][0], '%Y')
                except Exception:
                    pass

                # Link to dataset portal page.
                self.variables['DatasetPortalPage'] = self.portalUrl + '/' + doc['Dataset-ShortName'][0]

                # Default pub date: x hours ago, because we cast all granules
                # archived within the last x hours.
                self.variables['PubDate'] = DateUtility.pastDateRFC822(self.archivedWithin)
            else:
                raise Exception('No dataset found')

        if solrGranuleResponse is not None:
            solrGranuleJson = json.loads(solrGranuleResponse)

            logging.debug('granule count: ' + str(len(solrGranuleJson['response']['docs'])))

            # Track the most recent granule archive time.
            pubDate = 0
            for doc in solrGranuleJson['response']['docs']:
                if (doc['Granule-ArchiveTimeLong'][0] > pubDate):
                    pubDate = doc['Granule-ArchiveTimeLong'][0]
                self._populateItem(solrGranuleResponse, doc, None)

            if pubDate != 0:
                # Override the default pub date with the latest archive date.
                self.variables['PubDate'] = DateUtility.convertTimeLongToRFC822(pubDate)

            self.variables['granules'] = solrGranuleJson['response']['docs']
        else:
            raise Exception('No granules found')

    def _populateChannel(self, solrResponse):
        """Subclass hook: channel-level population. Default: nothing."""
        pass

    def _populateItem(self, solrResponse, doc, item):
        """Subclass hook: per-granule population. Default: nothing."""
        pass

Reply via email to