Hi,
If you use Ansible, I have a working solution I created:
Export_Groups_Host_Network_AddressRange_in_a_Group_to_CSV.yml
Note: to use the json_query filter, run "pip install jmespath" on the command line.
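For anyone new to JMESPath, here is a minimal sketch (with made-up sample data) of what the multiselect-hash queries used in the playbook below return; any key a member does not have simply comes back as null:

import jmespath

# made-up sample members, only to show the shape of the data returned by the API
members = [
    {"type": "host", "name": "web-01", "ipv4-address": "10.0.0.10", "comments": "demo host"},
    {"type": "network", "name": "lan-1", "subnet4": "10.0.1.0", "subnet-mask": "255.255.255.0"},
]

# same style of multiselect hash as the reduce_query variables in the playbook
query = '[].{type: type, name: name, "ipv4-address": "ipv4-address", comments: comments}'
print(jmespath.search(query, members))
# each member keeps only the listed keys; keys it does not have come back as None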
---
- name: Get Hosts, Networks, Subgroups, Address-Range in a Group and export to CSV
  hosts: check_point
  connection: httpapi
  gather_facts: False
  # perform your authentication
  vars_files:
    - 'login.yml'
  tasks:
    # create the group; if the group already exists, do nothing
    - name: if-group-doesn't-exist-create-one
      check_point.mgmt.cp_mgmt_group:
        name: Your_Group
        state: present
      delegate_to: Your_Domain
      ignore_errors: yes
    # grab all the details in the group
    # Note: if the group holds thousands of objects (around 3000 or more),
    # use details_level: standard to avoid overloading the request;
    # this will exclude any comments created on an object
    - name: get-details-from-group
      check_point.mgmt.cp_mgmt_group_facts:
        name: Your_Group
        details_level: full
      register: result
      delegate_to: Your_Domain
      ignore_errors: yes
    # apply a filter and keep only the members present in the group
    - name: filter-result-from-group-facts
      set_fact:
        filtered_group_facts: "{{ result['ansible_facts']['group']['members'] }}"
      ignore_errors: yes
    # get only the address ranges present in the group using the json_query filter
    # (run "pip install jmespath" on the command line to enable this filter)
    - name: get-address-range-if-present
      vars:
        reduce_query: >-
          [].{
          type: type,
          comments: comments,
          "ipv4-address-first": "ipv4-address-first",
          "ipv4-address-last": "ipv4-address-last",
          name: name
          }
        new_address_range: "{{ filtered_group_facts | json_query(reduce_query) }}"
      register: result_address_range
      debug:
        var: new_address_range
      no_log: yes
      ignore_errors: yes
    # get only the hosts present in the group using the json_query filter
    - name: get-hosts-if-present
      vars:
        reduce_query: >-
          [].{
          type: type,
          comments: comments,
          "ipv4-address": "ipv4-address",
          name: name
          }
        new_host: "{{ filtered_group_facts | json_query(reduce_query) }}"
      register: result_host
      debug:
        var: new_host
      no_log: yes
      ignore_errors: yes
    # get only the networks present in the group using the json_query filter
    - name: get-networks-if-present
      vars:
        reduce_query: >-
          [].{
          type: type,
          name: name,
          "subnet-mask": "subnet-mask",
          subnet4: subnet4,
          comments: comments
          }
        new_network: "{{ filtered_group_facts | json_query(reduce_query) }}"
      register: result_network
      debug:
        var: new_network
      no_log: yes
      ignore_errors: yes
    # get only the subgroups present in the group using the json_query filter
    - name: get-subgroup-if-present
      vars:
        reduce_query: >-
          [].{
          type: type,
          name: name,
          comments: comments
          }
        new_group: "{{ filtered_group_facts | json_query(reduce_query) }}"
      register: result_group
      debug:
        var: new_group
      no_log: yes
      ignore_errors: yes
    # extract only the address ranges
    - name: get-only-address-range
      set_fact:
        address_range_lists: "{{ result_address_range.new_address_range | rejectattr('ipv4-address-first', 'match', 'None') | flatten }}"
      ignore_errors: yes
    # extract only the hosts, if any exist
    - name: get-only-host
      set_fact:
        host_lists: "{{ result_host.new_host | rejectattr('ipv4-address', 'match', 'None') | flatten }}"
      ignore_errors: yes
    # extract only the networks, if any exist
    - name: get-only-network
      set_fact:
        network_lists: "{{ result_network.new_network | rejectattr('subnet-mask', 'match', 'None') | flatten }}"
      ignore_errors: yes
    # extract the subgroups, if any exist
    - name: get-only-group
      set_fact:
        group_lists: "{{ result_group.new_group | selectattr('type', 'match', 'group') | flatten }}"
        # group_lists: "{{ result_group.new_group | rejectattr('members', 'match', 'None') | flatten }}"
      ignore_errors: yes
    # append the lists so we can group them together
    - name: combine list
      set_fact:
        list_merged: "{{ network_lists + host_lists + group_lists + address_range_lists }}"
      ignore_errors: yes
    # create the log file locally
    - name: copy-file-to-log
      local_action:
        module: copy
        content: "{{ list_merged | to_nice_yaml }}"
        dest: tmp/log.yml
      changed_when: false
    # run the python script to convert the yaml log into csv
    - name: run-script-to-do-conversion
      script:
        cmd: ./Yaml_to_CSV.py
        executable: /usr/bin/python3
# Note: to use the json_query filter, run "pip install jmespath" on the command line
# Note: create a directory 'tmp' and create a file 'log.yml' inside it ("tmp/log.yml")
# We will write the data into log.yml and reuse it in the python script
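For reference, after a successful run tmp/log.yml holds the merged member list as plain YAML; an illustrative (made-up) example of its content:

- comments: null
  name: lan-1
  subnet-mask: 255.255.255.0
  subnet4: 10.0.1.0
  type: network
- comments: demo host
  ipv4-address: 10.0.0.10
  name: web-01
  type: host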
Credentials
login.yml
ansible_user: Enter_Username
ansible_password: Enter_Password
ansible_httpapi_validate_certs: False
ansible_network_os: check_point.mgmt.checkpoint
ansible_python_interpreter: "python"
# Note: the username and password are the ones you use to log into Check Point SmartConsole
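Since login.yml stores the credentials in plain text, it may be worth encrypting it with Ansible Vault and running the playbook with --ask-vault-pass, for example:

ansible-vault encrypt login.yml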
hosts
[check_point]
checkpoint ansible_host=Enter_Address_of_Host
[check_point:vars]
ansible_python_interpreter= "python"
[cma]
Domain_1 ansible_host=Enter_Address_of_Host ansible_checkpoint_domain=Domain_1
Domain_2 ansible_host=Enter_Address_of_Host ansible_checkpoint_domain=Domain_2
Domain_3 ansible_host=Enter_Address_of_Host ansible_checkpoint_domain=Domain_3
[cma:vars]
ansible_python_interpreter= "python"
# Note: use Domain_1, Domain_2, Domain_3, etc. in case of Multiple Domains
Yaml_to_CSV.py
import csv
import yaml
# creating the header for the csv file
fields = {
    'name': 'Name',
    'ipv4-address': 'IP',
    'subnet4': 'Subnet4',
    'subnet-mask': 'Subnet-Mask',
    'ipv4-address-first': 'IPv4-Address-First',
    'ipv4-address-last': 'IPv4-Address-Last',
    'comments': 'Comments',
    'type': 'Type',
}
# open the output file and write the header row
with open('Converted_Output.csv', 'w', newline='') as f_output:
    csv_output = csv.DictWriter(f_output, fieldnames=fields.values())
    csv_output.writeheader()
    # open the log file and write each member as a csv row
    for filename in ['tmp/log.yml']:
        with open(filename) as f_input:
            for row_yaml in yaml.safe_load(f_input):
                row_csv = {fields[key]: value for key, value in row_yaml.items()}
                csv_output.writerow(row_csv)
# Note: the script creates (or overwrites) 'Converted_Output.csv' in the current directory
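For illustration (the values are made up), the resulting Converted_Output.csv should look roughly like this:

Name,IP,Subnet4,Subnet-Mask,IPv4-Address-First,IPv4-Address-Last,Comments,Type
lan-1,,10.0.1.0,255.255.255.0,,,,network
web-01,10.0.0.10,,,,,demo host,host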
Finally, these files should all be in one directory:
- Converted_Output.csv (created by the script),
- hosts,
- login.yml,
- tmp/log.yml -> a folder "tmp" with a file "log.yml",
- the playbook (Export_Groups_Host_Network_AddressRange_in_a_Group_to_CSV.yml).
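With everything in place, the playbook can then be run from that directory with something like:

ansible-playbook -i hosts Export_Groups_Host_Network_AddressRange_in_a_Group_to_CSV.yml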
You can reach out to me if you are confused or have more questions.