Merge pull request #61 from BenB196/staging
Merge staging to master and bump version
BenB196 authored Oct 16, 2019
2 parents 53c9ac4 + faf4205 commit 9cc358d
Showing 6 changed files with 385 additions and 107 deletions.
4 changes: 2 additions & 2 deletions README.md
@@ -133,7 +133,7 @@ Currently, only JSON formatted configuration files are accepted, in the future Y
"logstash": { #Logstash output
"logstashURL": "192.168.1.105:8080" #Address of logstash
}
"esStandardized": true, #esStandardized This allows for the output to be formatted in an elastic standardized output
"esStandardized": "", #esStandardized This allows for the output to be formatted in standard Crashplan FFS (""), Semi Elastic Standard ("half"), or full Elastic Standard ("full")
"validIpAddressesOnly": true #Setting this to true makes the private IP Addresses valid. By default Crashplan FFS provides invalid private IP addresses.
},
{
@@ -211,7 +211,7 @@ If you are using the elastic output type there are a few important things to und
1. bestCompression
1. refreshInterval
1. aliases
1. If you use the esStandardized output, there is currently no built-in template for this. Therefore, you need to provide an index template on the Elasticsearch side.
1. If you use the esStandardized output (half or full), there is currently no built-in template for this. Therefore, you need to provide an index template on the Elasticsearch side.

### Logstash Integration

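For reference, the three values the updated README documents (matched case-insensitively by the validation added in config/configReader.go below) would appear in a config roughly as follows; this is an abbreviated, hypothetical sketch in the README's annotated style, not a complete configuration:

"esStandardized": ""     #default: plain Crashplan FFS output
"esStandardized": "half" #Semi Elastic Standard (presumably the SemiElasticFFSEvent shape added in eventOutput/fileHandler.go)
"esStandardized": "full" #full Elastic Standard (the ElasticFFSEvent shape)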
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
0.1.7
0.1.8
7 changes: 6 additions & 1 deletion config/configReader.go
@@ -35,7 +35,7 @@ type FFSQuery struct {
IPAPI IPAPI `json:"ip-api,omitempty"`
Elasticsearch Elasticsearch `json:"elasticsearch,omitempty"`
Logstash Logstash `json:"logstash,omitempty"`
EsStandardized bool `json:"esStandardized,omitempty"`
EsStandardized string `json:"esStandardized,omitempty"`
ValidIpAddressesOnly bool `json:"validIpAddressesOnly"`
}

@@ -425,6 +425,11 @@ func validateConfigJson(fileBytes []byte) (Config, error) {
}
}

//validate esStandardized
if query.EsStandardized != "" && !strings.EqualFold(query.EsStandardized,"full") && !strings.EqualFold(query.EsStandardized,"half") {
return config, errors.New("unknown value for esStandardized, values can either be full, half, or \"\"")
}

//Validate ip-api
if query.IPAPI != (IPAPI{}) && query.IPAPI.Enabled {

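One consequence of EsStandardized changing from bool to string is that a config still carrying the old boolean form will no longer parse. A minimal sketch of that behaviour with encoding/json, using a stand-in struct rather than the real FFSQuery:

package main

import (
	"encoding/json"
	"fmt"
)

// Stand-in for the relevant FFSQuery field after this commit.
type ffsQuery struct {
	EsStandardized string `json:"esStandardized,omitempty"`
}

func main() {
	var q ffsQuery

	// An old-style boolean value no longer unmarshals into the string field.
	err := json.Unmarshal([]byte(`{"esStandardized": true}`), &q)
	fmt.Println(err) // json: cannot unmarshal bool into Go struct field ... (message abbreviated)

	// The new string values ("", "half", "full") parse cleanly and are then
	// validated case-insensitively via strings.EqualFold in validateConfigJson.
	err = json.Unmarshal([]byte(`{"esStandardized": "half"}`), &q)
	fmt.Println(q.EsStandardized, err) // half <nil>
}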
12 changes: 6 additions & 6 deletions elasticsearch/logstash.go
@@ -2,6 +2,7 @@ package elasticsearch

import (
"net"
"time"
)

func CreateLogstashClient(logstashURL string) (net.Conn,error) {
@@ -10,13 +11,12 @@ func CreateLogstashClient(logstashURL string) (net.Conn,error) {
 	if err != nil {
 		return nil, err
 	}
 
-	connection, err := net.DialTCP("tcp",nil,tcpAddr)
-
-	if err != nil {
-		return nil, err
-	}
-	err = connection.SetWriteBuffer(100000)
+	d := net.Dialer{
+		Timeout: 30 * time.Second,
+	}
+
+	connection, err := d.Dial("tcp", tcpAddr.String())
 
 	if err != nil {
 		return nil, err
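The rewritten CreateLogstashClient swaps net.DialTCP for a net.Dialer, so connection establishment is now bounded by a 30-second timeout rather than the OS default; the SetWriteBuffer call appears to be dropped along the way, since Dialer.Dial returns a plain net.Conn. A self-contained sketch of the same pattern, using the Logstash address from the README example:

package main

import (
	"fmt"
	"net"
	"time"
)

// dialLogstash mirrors the new connection logic: resolve the address, then
// dial through a net.Dialer whose Timeout caps how long the dial may take.
func dialLogstash(logstashURL string) (net.Conn, error) {
	tcpAddr, err := net.ResolveTCPAddr("tcp", logstashURL)
	if err != nil {
		return nil, err
	}

	d := net.Dialer{Timeout: 30 * time.Second}
	return d.Dial("tcp", tcpAddr.String())
}

func main() {
	conn, err := dialLogstash("192.168.1.105:8080") // address taken from the README example
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()
	fmt.Println("connected to", conn.RemoteAddr())
}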
71 changes: 64 additions & 7 deletions eventOutput/fileHandler.go
@@ -16,22 +16,74 @@ import (

type FFSEvent struct {
ffs.FileEvent
ip_api.Location `json:",omitempty"`
*ip_api.Location `json:",omitempty"`
*GeoPoint `json:"geoPoint,omitempty"`
}

type SemiElasticFFSEvent struct {
FileEvent SemiElasticFileEvent `json:"file_event"`
Geoip *Geoip `json:"geoip,omitempty"`
}

type SemiElasticFileEvent struct {
EventId string `json:"event_id"`
EventType string `json:"event_type"`
EventTimestamp *time.Time `json:"event_timestamp,omitempty"`
InsertionTimestamp *time.Time `json:"insertion_timestamp,omitempty"`
FilePath string `json:"file_path,omitempty"`
FileName string `json:"file_name"`
FileType string `json:"file_type,omitempty"`
FileCategory string `json:"file_category,omitempty"`
FileSize *int `json:"file_size"`
FileOwner []string `json:"file_owner,omitempty"` //Array of owners
Md5Checksum string `json:"md5_checksum,omitempty"`
Sha256Checksum string `json:"sha256_checksum,omitempty"`
CreatedTimestamp *time.Time `json:"created_timestamp,omitempty"`
ModifyTimestamp *time.Time `json:"modify_timestamp,omitempty"`
DeviceUsername string `json:"device_username,omitempty"`
DeviceUid string `json:"device_uid,omitempty"`
UserUid string `json:"user_uid,omitempty"`
OsHostname string `json:"os_hostname,omitempty"`
DomainName string `json:"domain_name,omitempty"`
PublicIpAddress string `json:"public_ip_address,omitempty"`
PrivateIpAddresses []string `json:"private_ip_addresses,omitempty"` //Array of IP address strings
Actor string `json:"actor,omitempty"`
DirectoryId []string `json:"directory_id,omitempty"` //An array of something, I am not sure
Source string `json:"source,omitempty"`
Url string `json:"url,omitempty"`
Shared string `json:"shared,omitempty"`
SharedWith []string `json:"shared_with,omitempty"` //An array of strings (Mainly Email Addresses)
SharingTypeAdded []string `json:"sharing_type_added,omitempty"`
CloudDriveId string `json:"cloud_drive_id,omitempty"`
DetectionSourceAlias string `json:"detection_source_alias,omitempty"`
FileId string `json:"file_id,omitempty"`
Exposure []string `json:"exposure,omitempty"`
ProcessOwner string `json:"process_owner,omitempty"`
ProcessName string `json:"process_name,omitempty"`
RemovableMediaVendor string `json:"removable_media_vendor,omitempty"`
RemovableMediaName string `json:"removable_media_name,omitempty"`
RemovableMediaSerialNumber string `json:"removable_media_serial_number,omitempty"`
RemovableMediaCapacity *int `json:"removable_media_capacity,omitempty"`
RemovableMediaBusType string `json:"removable_media_bus_type,omitempty"`
RemovableMediaMediaName string `json:"removable_media_media_name,omitempty"`
RemovableMediaVolumeName string `json:"removable_media_volume_name,omitempty"`
RemovableMediaPartitionId string `json:"removable_media_partition_id,omitempty"`
SyncDestination string `json:"sync_destination,omitempty"`
}

type ElasticFFSEvent struct {
FileEvent ElasticFileEvent `json:"file_event"`
Geoip *Geoip `json:"geoip"`
Geoip *Geoip `json:"geoip,omitempty"`
}

type ElasticFileEvent struct {
Event *Event `json:"event,omitempty"`
Insertion *Insertion `json:"insertion,omitempty"`
File *File `json:"file"`
Device *Device `json:"device"`
Cloud *Cloud `json:"cloud"`
Process *Process `json:"process"`
File *File `json:"file,omitempty"`
Device *Device `json:"device,omitempty"`
Cloud *Cloud `json:"cloud,omitempty"`
Exposure []string `json:"exposure,omitempty"`
Process *Process `json:"process,omitempty"`
RemovableMedia *RemovableMedia `json:"removable_media,omitempty"`
SyncDestination string `json:"sync_destination,omitempty"`
}
@@ -80,7 +132,6 @@ type Cloud struct {
CloudDriveId string `json:"drive_id,omitempty"`
DetectionSourceAlias string `json:"detection_source_alias,omitempty"`
FileId string `json:"file_id,omitempty"`
Exposure []string `json:"exposure,omitempty"`
}

type Process struct {
@@ -195,6 +246,12 @@ func WriteEvents (ffsEvents interface{}, query config.FFSQuery) error {
return errors.New("error: flushing file: " + fileName + " " + err.Error())
}

err = file.Sync()

if err != nil {
return errors.New("error: syncing file: " + fileName + " " + err.Error())
}

return nil
}

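Many of the new and reworked fields above are pointers tagged omitempty (*ip_api.Location, *GeoPoint, Geoip *Geoip, the *time.Time timestamps): with encoding/json a nil pointer drops the key from the output entirely, while a pointer without omitempty (such as FileSize *int) marshals as null when absent. A minimal sketch of that behaviour, using hypothetical stand-in types rather than the real ones:

package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical stand-ins for the GeoPoint-style types used in the diff.
type GeoPoint struct {
	Lat float64 `json:"lat"`
	Lon float64 `json:"lon"`
}

type Event struct {
	EventId  string    `json:"event_id"`
	GeoPoint *GeoPoint `json:"geoPoint,omitempty"` // nil pointer: key is omitted entirely
	FileSize *int      `json:"file_size"`          // no omitempty: nil marshals as null
}

func main() {
	withGeo, _ := json.Marshal(Event{EventId: "a", GeoPoint: &GeoPoint{Lat: 40.7, Lon: -74.0}})
	withoutGeo, _ := json.Marshal(Event{EventId: "b"})

	fmt.Println(string(withGeo))    // {"event_id":"a","geoPoint":{"lat":40.7,"lon":-74},"file_size":null}
	fmt.Println(string(withoutGeo)) // {"event_id":"b","file_size":null}
}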
