Add support for importing as jsonb #18

Merged 3 commits on Jun 2, 2016
2 changes: 1 addition & 1 deletion .travis.yml
@@ -1,7 +1,7 @@
language: go
sudo: false
addons:
postgresql: 9.3
postgresql: 9.4
go:
- 1.4
- tip
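The version bump matters because the jsonb type only exists in PostgreSQL 9.4 and later; 9.3 offers json but not jsonb. A quick sanity check against the CI database might look like this (user and database names are placeholders):

# Placeholder credentials; any server reporting 9.4 or newer can store jsonb columns.
psql -U postgres -d postgres -t -c "SHOW server_version;"
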
4 changes: 2 additions & 2 deletions import.go
@@ -26,9 +26,9 @@ func NewCSVImport(db *sql.DB, schema string, tableName string, columns []string)
return newImport(db, schema, tableName, columns)
}

func NewJSONImport(db *sql.DB, schema string, tableName string, column string) (*Import, error) {
func NewJSONImport(db *sql.DB, schema string, tableName string, column string, dataType string) (*Import, error) {

table, err := createJSONTable(db, schema, tableName, column)
table, err := createJSONTable(db, schema, tableName, column, dataType)
if err != nil {
return nil, err
}
9 changes: 4 additions & 5 deletions json.go
@@ -67,7 +67,7 @@ func copyJSONRows(i *Import, reader *bufio.Reader, ignoreErrors bool) (error, in
return nil, success, failed
}

func importJSONObject(filename string, connStr string, schema string, tableName string) error {
func importJSONObject(filename string, connStr string, schema string, tableName string, dataType string) error {
db, err := connect(connStr, schema)
if err != nil {
return err
@@ -87,7 +87,7 @@ func importJSONObject(filename string, connStr string, schema string, tableName
return err
}

i, err := NewJSONImport(db, schema, tableName, "data")
i, err := NewJSONImport(db, schema, tableName, "data", dataType)
if err != nil {
return err
}
@@ -103,15 +103,15 @@ func importJSONObject(filename string, connStr string, schema string, tableName
return i.Commit()
}

func importJSON(filename string, connStr string, schema string, tableName string, ignoreErrors bool) error {
func importJSON(filename string, connStr string, schema string, tableName string, ignoreErrors bool, dataType string) error {

db, err := connect(connStr, schema)
if err != nil {
return err
}
defer db.Close()

i, err := NewJSONImport(db, schema, tableName, "data")
i, err := NewJSONImport(db, schema, tableName, "data", dataType)
if err != nil {
return err
}
@@ -134,7 +134,6 @@ func importJSON(filename string, connStr string, schema string, tableName string
bar.Finish()
}


if err != nil {
lineNumber := success + failed
return errors.New(fmt.Sprintf("line %d: %s", lineNumber, err))
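Both import paths simply forward the new dataType argument to NewJSONImport; the line-based reading and error reporting above are unchanged. As a reminder of the input the json command consumes, a minimal sample file could look like this (file name and records are invented):

# Hypothetical sample: one JSON document per line, each stored as one row in the "data" column.
cat > sample.json <<'EOF'
{"event": "PushEvent", "public": true}
{"event": "ForkEvent", "public": true}
EOF
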
19 changes: 17 additions & 2 deletions pgfutter.go
@@ -31,6 +31,15 @@ func parseTableName(c *cli.Context, filename string) string {
return postgresify(tableName)
}

func getDataType(c *cli.Context) string {
dataType := "json"
if c.GlobalBool("jsonb") {
dataType = "jsonb"
}

return dataType
}

func main() {
app := cli.NewApp()
app.Name = "pgfutter"
@@ -82,6 +91,10 @@ func main() {
Usage: "destination table",
EnvVar: "DB_TABLE",
},
cli.BoolFlag{
Name: "jsonb",
Usage: "use JSONB data type",
},
cli.BoolFlag{
Name: "ignore-errors",
Usage: "halt transaction on inconsistencies",
@@ -100,9 +113,10 @@ func main() {
ignoreErrors := c.GlobalBool("ignore-errors")
schema := c.GlobalString("schema")
tableName := parseTableName(c, filename)
dataType := getDataType(c)

connStr := parseConnStr(c)
err := importJSON(filename, connStr, schema, tableName, ignoreErrors)
err := importJSON(filename, connStr, schema, tableName, ignoreErrors, dataType)
exitOnError(err)
},
},
@@ -116,9 +130,10 @@ func main() {

schema := c.GlobalString("schema")
tableName := parseTableName(c, filename)
dataType := getDataType(c)

connStr := parseConnStr(c)
err := importJSONObject(filename, connStr, schema, tableName)
err := importJSONObject(filename, connStr, schema, tableName, dataType)
exitOnError(err)
},
},
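With the flag registered as a global option and read via c.GlobalBool, an invocation would place --jsonb before the json subcommand, mirroring the call added to test.sh below (connection values are placeholders):

# Placeholder connection settings; --jsonb switches the data column type from json to jsonb.
pgfutter --schema import --db mydb --user postgres --jsonb json data.json
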
6 changes: 3 additions & 3 deletions postgres.go
@@ -99,10 +99,10 @@ func parseConnStr(c *cli.Context) string {
)
}

//create table with a single JSON column data
func createJSONTable(db *sql.DB, schema string, tableName string, column string) (*sql.Stmt, error) {
//create table with a single JSON or JSONB column data
func createJSONTable(db *sql.DB, schema string, tableName string, column string, dataType string) (*sql.Stmt, error) {
fullyQualifiedTable := fmt.Sprintf("%s.%s", schema, tableName)
tableSchema := fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s (%s JSON)", fullyQualifiedTable, column)
tableSchema := fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s (%s %s)", fullyQualifiedTable, column, dataType)

statement, err := db.Prepare(tableSchema)
return statement, err
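With dataType set to "jsonb", the prepared statement reads CREATE TABLE IF NOT EXISTS <schema>.<table> (data jsonb); the default stays plain json. One way to confirm the column type after an import, along the lines of the query_field_type helper added to test.sh below (schema, table, and credentials are placeholders):

# Placeholder names; information_schema reports "jsonb" for the new type and "json" for the default.
psql -U postgres -d mydb -t -c \
  "SELECT data_type FROM information_schema.columns WHERE table_schema='import' AND table_name='my_table'"
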
28 changes: 27 additions & 1 deletion test.sh
@@ -16,6 +16,12 @@ function query_counts() {
echo "$counts"
}

function query_field_type() {
local table=$1
local data_type=$(psql -U ${DB_USER} -d ${DB_NAME} -t -c "SELECT data_type FROM information_schema.columns WHERE table_schema='${DB_SCHEMA}' AND table_name='${table}'")
echo "$data_type"
}

function test_readme_csv_sample() {
# test whether readme docs still work
echo "test"
@@ -63,7 +69,22 @@ function import_and_test_json() {
exit 300
else
local db_count=$(query_counts $table)
echo "Imported $(expr $db_count) records into $table"
local data_type=$(query_field_type $table)
echo "Imported $(expr $db_count) records into $table as $data_type"
fi
}

function import_and_test_json_as_jsonb() {
local table=$1
local filename=$2
pgfutter --schema $DB_SCHEMA --db $DB_NAME --user $DB_USER --jsonb json "$filename"
if [ $? -ne 0 ]; then
echo "pgfutter could not import $filename"
exit 300
else
local db_count=$(query_counts $table)
local data_type=$(query_field_type $table)
echo "Imported $(expr $db_count) records into $table as $data_type"
fi
}

@@ -90,6 +111,11 @@ import_csv_and_skip_header_row_with_custom_fields
import_csv_with_special_delimiter_and_trailing

import_and_test_json "_2015_01_01_15" "$SAMPLES_DIR/2015-01-01-15.json"

# We change the type of the data column for this test, so we have to recreate the database
recreate_db
import_and_test_json_as_jsonb "_2015_01_01_15" "$SAMPLES_DIR/2015-01-01-15.json"

import_and_test_csv "parking_garage_availability" "$SAMPLES_DIR/parking_garage_availability.csv"
# File can no longer be downloaded
#import_and_test_csv "local_severe_wheather_warning_systems" "$SAMPLES_DIR/local_severe_wheather_warning_systems.csv"