Documentation ¶
Overview ¶
Package tinysql provides a lightweight, embeddable SQL database for Go applications.
TinySQL is an educational SQL database that demonstrates core database concepts including:
- SQL parsing and execution (DDL, DML, SELECT with joins, aggregates, CTEs)
- Multi-Version Concurrency Control (MVCC) with snapshot isolation
- Write-Ahead Logging (WAL) for durability and crash recovery
- Multi-tenancy support for isolated data namespaces
- In-memory and persistent storage with GOB serialization
Basic Usage ¶
Create a database, execute SQL, and query results:
db := tinysql.NewDB()
ctx := context.Background()
// Parse and execute DDL
stmt, _ := tinysql.ParseSQL("CREATE TABLE users (id INT, name TEXT)")
tinysql.Execute(ctx, db, "default", stmt)
// Insert data
stmt, _ = tinysql.ParseSQL("INSERT INTO users VALUES (1, 'Alice')")
tinysql.Execute(ctx, db, "default", stmt)
// Query data
stmt, _ = tinysql.ParseSQL("SELECT * FROM users WHERE id = 1")
rs, _ := tinysql.Execute(ctx, db, "default", stmt)
for _, row := range rs.Rows {
fmt.Println(row)
}
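The tenant argument ("default" above) selects an isolated namespace of tables. A minimal multi-tenancy sketch; the tenant names here are illustrative, and each tenant only sees its own data:

// Reuses db and ctx from the snippet above; tenant names are examples.
ddl, _ := tinysql.ParseSQL("CREATE TABLE notes (id INT, body TEXT)")
tinysql.Execute(ctx, db, "tenant_a", ddl)
tinysql.Execute(ctx, db, "tenant_b", ddl)
ins, _ := tinysql.ParseSQL("INSERT INTO notes VALUES (1, 'visible to tenant_a only')")
tinysql.Execute(ctx, db, "tenant_a", ins)
sel, _ := tinysql.ParseSQL("SELECT * FROM notes")
rsA, _ := tinysql.Execute(ctx, db, "tenant_a", sel) // 1 row
rsB, _ := tinysql.Execute(ctx, db, "tenant_b", sel) // 0 rows
fmt.Println(len(rsA.Rows), len(rsB.Rows))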
Persistence ¶
Save and load database snapshots:
// Save to file
tinysql.SaveToFile(db, "mydb.gob")
// Load from file
db, err := tinysql.LoadFromFile("mydb.gob")
Advanced Features ¶
Enable MVCC for concurrent transactions:
mvcc := db.MVCC()
tx, _ := mvcc.BeginTx(tinysql.SnapshotIsolation)
// ... perform transaction operations ...
mvcc.CommitTx(tx.ID)
Enable WAL for durability:
wal, _ := tinysql.NewAdvancedWAL("data/wal.log")
db.AttachAdvancedWAL(wal)
Query Compilation ¶
Pre-compile queries for better performance:
cache := tinysql.NewQueryCache(100)
query, _ := cache.Compile("SELECT * FROM users WHERE active = true")
rs, _ := query.Execute(ctx, db, "default")
For more examples, see the example_test.go file in the repository.
Example ¶
Example demonstrates the usage of the TinySQL engine
package main
import (
"context"
"fmt"
"strings"
tsql "github.com/SimonWaldherr/tinySQL"
)
func main() {
db := tsql.NewDB()
dedent := func(s string) string {
trimmed := strings.TrimSpace(s)
if !strings.Contains(trimmed, "\n") {
return trimmed
}
lines := strings.Split(trimmed, "\n")
indent := -1
for _, line := range lines[1:] {
if strings.TrimSpace(line) == "" {
continue
}
leading := len(line) - len(strings.TrimLeft(line, " \t"))
if indent == -1 || leading < indent {
indent = leading
}
}
if indent > 0 {
for i := 1; i < len(lines); i++ {
if strings.TrimSpace(lines[i]) == "" {
lines[i] = ""
continue
}
if len(lines[i]) >= indent {
lines[i] = lines[i][indent:]
}
}
}
for i, line := range lines {
lines[i] = strings.TrimRight(line, " \t")
}
return strings.Join(lines, "\n")
}
run := func(sql string) {
display := dedent(sql)
fmt.Println("SQL>", display)
p := tsql.NewParser(sql)
st, err := p.ParseStatement()
if err != nil {
fmt.Println("ERR:", err)
fmt.Println()
return
}
rs, err := tsql.Execute(context.Background(), db, "default", st)
if err != nil {
fmt.Println("ERR:", err)
fmt.Println()
return
}
if rs == nil {
fmt.Println()
return
}
if len(rs.Rows) == 1 && len(rs.Cols) == 1 && (rs.Cols[0] == "updated" || rs.Cols[0] == "deleted") {
if val, ok := tsql.GetVal(rs.Rows[0], rs.Cols[0]); ok {
fmt.Printf("%s: %v\n\n", rs.Cols[0], val)
return
}
}
displayCols := make([]string, len(rs.Cols))
for i, col := range rs.Cols {
parts := strings.Split(col, ".")
displayCols[i] = parts[len(parts)-1]
}
fmt.Println(strings.Join(displayCols, " | "))
for _, row := range rs.Rows {
cells := make([]string, len(rs.Cols))
for i, col := range rs.Cols {
if v, ok := tsql.GetVal(row, col); ok {
cells[i] = fmt.Sprint(v)
} else {
cells[i] = ""
}
}
fmt.Println(strings.Join(cells, " | "))
}
fmt.Println()
}
// --- Create table and seed data ---
run(`CREATE TABLE users (
id INT,
name TEXT,
active BOOL,
score INT
)`)
run(`INSERT INTO users (id, name, active, score) VALUES (1, 'Alice', true, 40)`)
run(`INSERT INTO users (id, name, active, score) VALUES (2, 'Bob', false, 25)`)
run(`INSERT INTO users (id, name, active, score) VALUES (3, 'Carol', true, 30)`)
// --- Basic reads ---
run(`SELECT id, name, active, score FROM users ORDER BY id`)
run(`SELECT name, score FROM users WHERE active = true ORDER BY score DESC`)
// --- Update a row ---
run(`UPDATE users SET score = 50 WHERE name = 'Bob'`)
run(`SELECT name, score FROM users ORDER BY id`)
// --- Aggregate summary ---
run(`SELECT COUNT(*) AS total_users, SUM(score) AS total_score FROM users`)
// --- Delete inactive rows ---
run(`DELETE FROM users WHERE active = false`)
run(`SELECT name FROM users ORDER BY id`)
}
Output:

SQL> CREATE TABLE users (
id INT,
name TEXT,
active BOOL,
score INT
)

SQL> INSERT INTO users (id, name, active, score) VALUES (1, 'Alice', true, 40)

SQL> INSERT INTO users (id, name, active, score) VALUES (2, 'Bob', false, 25)

SQL> INSERT INTO users (id, name, active, score) VALUES (3, 'Carol', true, 30)

SQL> SELECT id, name, active, score FROM users ORDER BY id
id | name | active | score
1 | Alice | true | 40
2 | Bob | false | 25
3 | Carol | true | 30

SQL> SELECT name, score FROM users WHERE active = true ORDER BY score DESC
name | score
Alice | 40
Carol | 30

SQL> UPDATE users SET score = 50 WHERE name = 'Bob'
updated: 1

SQL> SELECT name, score FROM users ORDER BY id
name | score
Alice | 40
Bob | 50
Carol | 30

SQL> SELECT COUNT(*) AS total_users, SUM(score) AS total_score FROM users
total_users | total_score
3 | 120

SQL> DELETE FROM users WHERE active = false
deleted: 1

SQL> SELECT name FROM users ORDER BY id
name
Alice
Carol
Index ¶
- Constants
- func GetVal(row Row, name string) (any, bool)
- func SaveToFile(db *DB, filename string) error
- func ToSQL(stmt engine.Statement) string
- type AdvancedWAL
- type AdvancedWALConfig
- type ColType
- type Column
- type CompiledQuery
- type DB
- type DeleteBuilder
- type ExprBuilder
- func Add(left, right ExprBuilder) ExprBuilder
- func And(exprs ...ExprBuilder) ExprBuilder
- func Avg(expr ExprBuilder) ExprBuilder
- func Coalesce(exprs ...ExprBuilder) ExprBuilder
- func Col(name string) ExprBuilder
- func Concat(exprs ...ExprBuilder) ExprBuilder
- func Count(expr ExprBuilder) ExprBuilder
- func CountStar() ExprBuilder
- func Div(left, right ExprBuilder) ExprBuilder
- func Eq(left, right ExprBuilder) ExprBuilder
- func Ge(left, right ExprBuilder) ExprBuilder
- func Gt(left, right ExprBuilder) ExprBuilder
- func IsNotNull(expr ExprBuilder) ExprBuilder
- func IsNull(expr ExprBuilder) ExprBuilder
- func Le(left, right ExprBuilder) ExprBuilder
- func Lower(expr ExprBuilder) ExprBuilder
- func Lt(left, right ExprBuilder) ExprBuilder
- func MD5(expr ExprBuilder) ExprBuilder
- func Max(expr ExprBuilder) ExprBuilder
- func Min(expr ExprBuilder) ExprBuilder
- func Mul(left, right ExprBuilder) ExprBuilder
- func Ne(left, right ExprBuilder) ExprBuilder
- func Not(expr ExprBuilder) ExprBuilder
- func Null() ExprBuilder
- func Or(exprs ...ExprBuilder) ExprBuilder
- func SHA1(expr ExprBuilder) ExprBuilder
- func SHA256(expr ExprBuilder) ExprBuilder
- func SHA512(expr ExprBuilder) ExprBuilder
- func Sub(left, right ExprBuilder) ExprBuilder
- func Sum(expr ExprBuilder) ExprBuilder
- func Trim(expr ExprBuilder) ExprBuilder
- func Upper(expr ExprBuilder) ExprBuilder
- func Val(value any) ExprBuilder
- type FuzzyImportOptions
- type ImportOptions
- type ImportResult
- func FuzzyImportCSV(ctx context.Context, db *DB, tenant, tableName string, src io.Reader, ...) (*ImportResult, error)
- func FuzzyImportJSON(ctx context.Context, db *DB, tenant, tableName string, src io.Reader, ...) (*ImportResult, error)
- func ImportCSV(ctx context.Context, db *DB, tenant, tableName string, src io.Reader, ...) (*ImportResult, error)
- func ImportFile(ctx context.Context, db *DB, tenant, tableName, filePath string, ...) (*ImportResult, error)
- func ImportJSON(ctx context.Context, db *DB, tenant, tableName string, src io.Reader, ...) (*ImportResult, error)
- type InsertBuilder
- type IsolationLevel
- type LSN
- type MVCCManager
- type Parser
- type QueryCache
- type ResultSet
- type Row
- type SelectBuilder
- func (sb *SelectBuilder) Build() *engine.Select
- func (sb *SelectBuilder) From(table string) *SelectBuilder
- func (sb *SelectBuilder) FromAs(table, alias string) *SelectBuilder
- func (sb *SelectBuilder) GroupBy(columns ...string) *SelectBuilder
- func (sb *SelectBuilder) GroupByExpr(exprs ...ExprBuilder) *SelectBuilder
- func (sb *SelectBuilder) Having(condition ExprBuilder) *SelectBuilder
- func (sb *SelectBuilder) Join(table string, on ExprBuilder) *SelectBuilder
- func (sb *SelectBuilder) JoinAs(table, alias string, on ExprBuilder) *SelectBuilder
- func (sb *SelectBuilder) LeftJoin(table string, on ExprBuilder) *SelectBuilder
- func (sb *SelectBuilder) LeftJoinAs(table, alias string, on ExprBuilder) *SelectBuilder
- func (sb *SelectBuilder) Limit(n int) *SelectBuilder
- func (sb *SelectBuilder) Offset(n int) *SelectBuilder
- func (sb *SelectBuilder) OrderBy(column string) *SelectBuilder
- func (sb *SelectBuilder) OrderByDesc(column string) *SelectBuilder
- func (sb *SelectBuilder) Where(condition ExprBuilder) *SelectBuilder
- type Statement
- type Table
- type TableBuilder
- func (tb *TableBuilder) Bool(name string) *TableBuilder
- func (tb *TableBuilder) Build() *engine.CreateTable
- func (tb *TableBuilder) Column(name string, colType ColType) *TableBuilder
- func (tb *TableBuilder) Create(db *DB, tenant string) error
- func (tb *TableBuilder) Float(name string) *TableBuilder
- func (tb *TableBuilder) Int(name string) *TableBuilder
- func (tb *TableBuilder) JSON(name string) *TableBuilder
- func (tb *TableBuilder) Temp() *TableBuilder
- func (tb *TableBuilder) Text(name string) *TableBuilder
- func (tb *TableBuilder) Timestamp(name string) *TableBuilder
- type Timestamp
- type TxContext
- type TxID
- type TxStatus
- type UpdateBuilder
- type WALOperationType
- type WALRecord
- type WithQuery
Examples ¶
- Package
- ImportCSV
- ImportCSV (Compressed)
- ImportFile
- ImportJSON
- ImportOptions
- OpenFile
Constants ¶
const (
	TxStatusInProgress = storage.TxStatusInProgress // Transaction is active
	TxStatusCommitted  = storage.TxStatusCommitted  // Transaction committed successfully
	TxStatusAborted    = storage.TxStatusAborted    // Transaction was aborted
)
Transaction status constants
const (
	WALOpBegin      = storage.WALOpBegin      // Transaction begin
	WALOpInsert     = storage.WALOpInsert     // Row insert
	WALOpUpdate     = storage.WALOpUpdate     // Row update
	WALOpDelete     = storage.WALOpDelete     // Row delete
	WALOpCommit     = storage.WALOpCommit     // Transaction commit
	WALOpAbort      = storage.WALOpAbort      // Transaction abort
	WALOpCheckpoint = storage.WALOpCheckpoint // Checkpoint operation
)
WAL operation type constants
Variables ¶
This section is empty.
Functions ¶
func GetVal ¶
GetVal retrieves a value from a result row by column name (case-insensitive).
Returns the value and true if the column exists, or nil and false otherwise. This is the recommended way to access row data as it handles case-insensitivity.
Example:
for _, row := range rs.Rows {
id, ok := tinysql.GetVal(row, "id")
if ok {
fmt.Printf("ID: %v\n", id)
}
name, _ := tinysql.GetVal(row, "Name") // Case-insensitive
fmt.Printf("Name: %v\n", name)
}
For type-safe access, use type assertion after retrieving the value.
func SaveToFile ¶
SaveToFile serializes the entire database to a GOB file for persistence.
This creates a snapshot of all tables, rows, and metadata for all tenants. The file can be loaded later with LoadFromFile to restore the database state.
Example:
err := tinysql.SaveToFile(db, "mydb.gob")
if err != nil {
log.Fatal(err)
}
Note: This saves the current state only. For durability during crashes, use AttachAdvancedWAL to enable write-ahead logging.
Types ¶
type AdvancedWAL ¶ added in v0.2.1
type AdvancedWAL = storage.AdvancedWAL
AdvancedWAL manages row-level write-ahead logging with REDO/UNDO support. Provides durability, crash recovery, and point-in-time recovery.
func NewAdvancedWAL ¶ added in v0.2.1
func NewAdvancedWAL(path string) (*AdvancedWAL, error)
NewAdvancedWAL creates a new write-ahead log manager with default configuration.
The WAL logs all database modifications (INSERT, UPDATE, DELETE) to disk before applying them. This enables:
- Crash recovery: replay committed transactions after restart
- Point-in-time recovery: restore to any previous state
- Durability: changes survive system crashes
Example:
wal, err := tinysql.NewAdvancedWAL("data/wal.log")
if err != nil {
log.Fatal(err)
}
defer wal.Close()
db.AttachAdvancedWAL(wal)
// Now all database modifications are logged
stmt, _ := tinysql.ParseSQL("INSERT INTO users VALUES (1, 'Alice')")
tinysql.Execute(ctx, db, "default", stmt)
The path parameter specifies the WAL file location.
func OpenAdvancedWAL ¶ added in v0.2.1
func OpenAdvancedWAL(config AdvancedWALConfig) (*AdvancedWAL, error)
OpenAdvancedWAL creates or opens a WAL with custom configuration.
This provides full control over WAL behavior including checkpoint intervals, compression, and buffer sizes.
Example:
wal, err := tinysql.OpenAdvancedWAL(tinysql.AdvancedWALConfig{
Path: "data/wal.log",
CheckpointPath: "data/checkpoint",
CheckpointEvery: 5000,
CheckpointInterval: 10 * time.Minute,
Compress: true,
BufferSize: 128 * 1024,
})
Returns the WAL instance or an error if initialization fails.
type AdvancedWALConfig ¶ added in v0.2.1
type AdvancedWALConfig = storage.AdvancedWALConfig
AdvancedWALConfig configures the advanced WAL behavior.
type ColType ¶ added in v0.2.1
ColType enumerates supported column data types (INT, TEXT, BOOL, JSON, etc.).
const (
	// Integer types
	IntType   ColType = storage.IntType
	Int8Type  ColType = storage.Int8Type
	Int16Type ColType = storage.Int16Type
	Int32Type ColType = storage.Int32Type
	Int64Type ColType = storage.Int64Type
	UintType  ColType = storage.UintType
	Uint8Type ColType = storage.Uint8Type

	// Floating point types
	Float32Type ColType = storage.Float32Type
	Float64Type ColType = storage.Float64Type
	FloatType   ColType = storage.FloatType

	// String types
	StringType ColType = storage.StringType
	TextType   ColType = storage.TextType

	// Boolean type
	BoolType ColType = storage.BoolType

	// Time types
	TimeType      ColType = storage.TimeType
	DateType      ColType = storage.DateType
	DateTimeType  ColType = storage.DateTimeType
	TimestampType ColType = storage.TimestampType

	// Complex types
	JsonType  ColType = storage.JsonType
	JsonbType ColType = storage.JsonbType
)
type CompiledQuery ¶ added in v0.2.1
type CompiledQuery = engine.CompiledQuery
CompiledQuery represents a pre-parsed SQL statement that can be executed multiple times efficiently.
func Compile ¶
func Compile(cache *QueryCache, sql string) (*CompiledQuery, error)
Compile parses and caches a SQL query for reuse, similar to regexp.Compile.
The compiled query can be executed multiple times without re-parsing. This is useful for queries executed repeatedly with different parameters.
Example:
cache := tinysql.NewQueryCache(100)
query, err := tinysql.Compile(cache, "SELECT * FROM users WHERE active = true")
if err != nil {
log.Fatal(err)
}
// Execute the compiled query
rs, _ := query.Execute(ctx, db, "default")
Returns a CompiledQuery that can be executed via ExecuteCompiled or query.Execute.
func MustCompile ¶
func MustCompile(cache *QueryCache, sql string) *CompiledQuery
MustCompile is like Compile but panics on error, similar to regexp.MustCompile.
Useful for static queries in tests or initialization where errors are unexpected.
Example:
cache := tinysql.NewQueryCache(100)
query := tinysql.MustCompile(cache, "SELECT * FROM users")
type DB ¶ added in v0.2.1
DB represents a multi-tenant database instance with support for MVCC and WAL. Use NewDB to create a new instance.
func LoadFromFile ¶
LoadFromFile deserializes a database from a GOB file created by SaveToFile.
This restores all tables, rows, and metadata from the file. The returned database instance is ready for use immediately.
Example:
db, err := tinysql.LoadFromFile("mydb.gob")
if err != nil {
log.Fatal(err)
}
defer db.Close()
Returns a new DB instance or an error if the file cannot be read.
func NewDB ¶
func NewDB() *DB
NewDB creates a new in-memory multi-tenant database instance with MVCC support.
The database starts empty with no tables. Use SQL DDL statements to create tables, or load from a file with LoadFromFile.
Example:
db := tinysql.NewDB()
defer db.Close() // Optional cleanup
The returned DB is safe for concurrent use and includes an integrated MVCC manager.
func OpenFile ¶ added in v0.3.0
OpenFile opens a data file and returns a DB with the data loaded. This is a convenience function for quick data exploration.
Example:
db, tableName, _ := tinysql.OpenFile(context.Background(), "data.csv", nil)
stmt, _ := tinysql.ParseSQL(fmt.Sprintf("SELECT * FROM %s LIMIT 10", tableName))
rs, _ := tinysql.Execute(ctx, db, "default", stmt)
for _, row := range rs.Rows {
fmt.Println(row)
}
Parameters:
- ctx: Context for cancellation
- filePath: Path to the data file
- opts: Optional import configuration
Returns:
- db: New database instance with imported data
- tableName: The table name where data was loaded
- error: Any error encountered during import
Example ¶
ExampleOpenFile demonstrates the convenience function for opening and querying files.
ctx := context.Background()
// Create a temporary TSV file
tmpFile, _ := os.CreateTemp("", "data-*.tsv")
defer os.Remove(tmpFile.Name())
tsvContent := "city\tpopulation\tcountry\n"
tsvContent += "Tokyo\t37400000\tJapan\n"
tsvContent += "Delhi\t31400000\tIndia\n"
tsvContent += "Shanghai\t27800000\tChina\n"
tmpFile.WriteString(tsvContent)
tmpFile.Close()
// Open file directly (creates DB and imports in one step)
db, tableName, err := tinysql.OpenFile(ctx, tmpFile.Name(), &tinysql.ImportOptions{
HeaderMode: "present", // Explicitly mark header present for TSV
})
if err != nil {
fmt.Printf("Error: %v\n", err)
return
}
fmt.Printf("Opened file into table: data\n")
// Query immediately - use simple count since column names may be sanitized
query := fmt.Sprintf("SELECT COUNT(*) as total FROM %s", tableName)
stmt, _ := tinysql.ParseSQL(query)
rs, err2 := tinysql.Execute(ctx, db, "default", stmt)
if err2 != nil {
fmt.Printf("Query error: %v\n", err2)
return
}
if len(rs.Rows) > 0 {
fmt.Printf("Total cities: %v\n", rs.Rows[0]["total"])
}
Output:

Opened file into table: data
Total cities: 3
type DeleteBuilder ¶ added in v0.3.0
type DeleteBuilder struct {
// contains filtered or unexported fields
}
DeleteBuilder provides a fluent interface for building DELETE statements.
func DeleteFrom ¶ added in v0.3.0
func DeleteFrom(table string) *DeleteBuilder
DeleteFrom creates a new DELETE builder.
func (*DeleteBuilder) Build ¶ added in v0.3.0
func (db *DeleteBuilder) Build() *engine.Delete
Build creates the DELETE statement.
func (*DeleteBuilder) Where ¶ added in v0.3.0
func (db *DeleteBuilder) Where(condition ExprBuilder) *DeleteBuilder
Where adds a WHERE condition.
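For example, a DELETE against the users table from the package example can be assembled with the Eq, Col, and Val helpers documented under ExprBuilder below; a minimal sketch:

del := tinysql.DeleteFrom("users").
	Where(Eq(Col("active"), Val(false)))
stmt := del.Build() // *engine.Delete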
type ExprBuilder ¶ added in v0.3.0
ExprBuilder is an interface for building SQL expressions.
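Expression builders compose freely. As a sketch, a condition equivalent to active = true AND (score >= 30 OR name IS NULL) can be built and attached to a query, in the same style as the Select example further below:

cond := And(
	Eq(Col("active"), Val(true)),
	Or(
		Ge(Col("score"), Val(30)),
		IsNull(Col("name")),
	),
)
q := tinysql.SelectStar().From("users").Where(cond)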
func Add ¶ added in v0.3.0
func Add(left, right ExprBuilder) ExprBuilder
Add creates an addition expression (+).
func And ¶ added in v0.3.0
func And(exprs ...ExprBuilder) ExprBuilder
And creates a logical AND expression.
func Avg ¶ added in v0.3.0
func Avg(expr ExprBuilder) ExprBuilder
Avg creates an AVG aggregate function.
func Coalesce ¶ added in v0.3.0
func Coalesce(exprs ...ExprBuilder) ExprBuilder
Coalesce creates a COALESCE function call.
func Col ¶ added in v0.3.0
func Col(name string) ExprBuilder
Col creates a column reference expression.
Example:
Col("users.id")
Col("name")
func Concat ¶ added in v0.3.0
func Concat(exprs ...ExprBuilder) ExprBuilder
Concat creates a CONCAT function call.
func Count ¶ added in v0.3.0
func Count(expr ExprBuilder) ExprBuilder
Count creates a COUNT aggregate function.
func CountStar ¶ added in v0.3.0
func CountStar() ExprBuilder
CountStar creates a COUNT(*) aggregate.
func Div ¶ added in v0.3.0
func Div(left, right ExprBuilder) ExprBuilder
Div creates a division expression (/).
func Eq ¶ added in v0.3.0
func Eq(left, right ExprBuilder) ExprBuilder
Eq creates an equality comparison (=).
func Ge ¶ added in v0.3.0
func Ge(left, right ExprBuilder) ExprBuilder
Ge creates a greater-than-or-equal comparison (>=).
func Gt ¶ added in v0.3.0
func Gt(left, right ExprBuilder) ExprBuilder
Gt creates a greater-than comparison (>).
func IsNotNull ¶ added in v0.3.0
func IsNotNull(expr ExprBuilder) ExprBuilder
IsNotNull creates an IS NOT NULL expression.
func IsNull ¶ added in v0.3.0
func IsNull(expr ExprBuilder) ExprBuilder
IsNull creates an IS NULL expression.
func Le ¶ added in v0.3.0
func Le(left, right ExprBuilder) ExprBuilder
Le creates a less-than-or-equal comparison (<=).
func Lower ¶ added in v0.3.0
func Lower(expr ExprBuilder) ExprBuilder
Lower creates a LOWER function call.
func Lt ¶ added in v0.3.0
func Lt(left, right ExprBuilder) ExprBuilder
Lt creates a less-than comparison (<).
func MD5 ¶ added in v0.3.0
func MD5(expr ExprBuilder) ExprBuilder
MD5 creates an MD5 hash function call.
func Max ¶ added in v0.3.0
func Max(expr ExprBuilder) ExprBuilder
Max creates a MAX aggregate function.
func Min ¶ added in v0.3.0
func Min(expr ExprBuilder) ExprBuilder
Min creates a MIN aggregate function.
func Mul ¶ added in v0.3.0
func Mul(left, right ExprBuilder) ExprBuilder
Mul creates a multiplication expression (*).
func Ne ¶ added in v0.3.0
func Ne(left, right ExprBuilder) ExprBuilder
Ne creates a not-equal comparison (<>).
func Not ¶ added in v0.3.0
func Not(expr ExprBuilder) ExprBuilder
Not creates a logical NOT expression.
func Or ¶ added in v0.3.0
func Or(exprs ...ExprBuilder) ExprBuilder
Or creates a logical OR expression.
func SHA1 ¶ added in v0.3.0
func SHA1(expr ExprBuilder) ExprBuilder
SHA1 creates a SHA1 hash function call.
func SHA256 ¶ added in v0.3.0
func SHA256(expr ExprBuilder) ExprBuilder
SHA256 creates a SHA256 hash function call.
func SHA512 ¶ added in v0.3.0
func SHA512(expr ExprBuilder) ExprBuilder
SHA512 creates a SHA512 hash function call.
func Sub ¶ added in v0.3.0
func Sub(left, right ExprBuilder) ExprBuilder
Sub creates a subtraction expression (-).
func Sum ¶ added in v0.3.0
func Sum(expr ExprBuilder) ExprBuilder
Sum creates a SUM aggregate function.
func Trim ¶ added in v0.3.0
func Trim(expr ExprBuilder) ExprBuilder
Trim creates a TRIM function call.
func Upper ¶ added in v0.3.0
func Upper(expr ExprBuilder) ExprBuilder
Upper creates an UPPER function call.
func Val ¶ added in v0.3.0
func Val(value any) ExprBuilder
Val creates a literal value expression.
Example:
Val(42)
Val("hello")
Val(true)
type FuzzyImportOptions ¶ added in v0.3.1
type FuzzyImportOptions = importer.FuzzyImportOptions
FuzzyImportOptions extends ImportOptions with fuzzy parsing capabilities. Use this for importing data that may have formatting issues, inconsistent delimiters, malformed quotes, or other common data quality problems.
type ImportOptions ¶ added in v0.3.0
type ImportOptions = importer.ImportOptions
ImportOptions re-exports importer.ImportOptions for convenience. Configure import behavior including type inference, batching, and null handling.
Example ¶
ExampleImportOptions demonstrates various import configuration options.
ctx := context.Background()
db := tinysql.NewDB()
// CSV with custom options
csvData := `name;score;grade
Alice;95.5;A
Bob;87.3;B
Charlie;92.1;A`
result, _ := tinysql.ImportCSV(ctx, db, "default", "students",
strings.NewReader(csvData), &tinysql.ImportOptions{
CreateTable: true,
TypeInference: true,
HeaderMode: "present",
DelimiterCandidates: []rune{';'}, // Force semicolon delimiter
BatchSize: 100,
NullLiterals: []string{"", "NULL", "N/A"},
})
fmt.Printf("Imported %d rows using delimiter '%c'\n",
result.RowsInserted, result.Delimiter)
fmt.Printf("Columns: %v\n", result.ColumnNames)
// Note: Type names may vary (Float64Type vs FloatType)
fmt.Printf("Has %d columns with types detected\n", len(result.ColumnTypes))
Output:

Imported 3 rows using delimiter ';'
Columns: [name score grade]
Has 3 columns with types detected
type ImportResult ¶ added in v0.3.0
type ImportResult = importer.ImportResult
ImportResult re-exports importer.ImportResult for convenience. Contains metadata about the import operation.
func FuzzyImportCSV ¶ added in v0.3.1
func FuzzyImportCSV(ctx context.Context, db *DB, tenant, tableName string, src io.Reader, opts *FuzzyImportOptions) (*ImportResult, error)
FuzzyImportCSV is a more forgiving version of ImportCSV that handles malformed data. It attempts to automatically fix common issues like:
- Inconsistent column counts (pads/truncates rows)
- Unmatched quotes in CSV fields
- Mixed delimiters within the same file
- Invalid UTF-8 characters
- Numbers with thousand separators
- Mixed data types in columns
Example:
opts := &tinysql.FuzzyImportOptions{
ImportOptions: &tinysql.ImportOptions{
CreateTable: true,
TypeInference: true,
},
SkipInvalidRows: true,
FixQuotes: true,
CoerceTypes: true,
}
result, err := tinysql.FuzzyImportCSV(ctx, db, "default", "messy_data", file, opts)
Parameters are the same as ImportCSV. Returns ImportResult which includes errors encountered (non-fatal in fuzzy mode).
func FuzzyImportJSON ¶ added in v0.3.1
func FuzzyImportJSON(ctx context.Context, db *DB, tenant, tableName string, src io.Reader, opts *FuzzyImportOptions) (*ImportResult, error)
FuzzyImportJSON attempts to parse malformed JSON data. It handles common JSON issues like:
- Single quotes instead of double quotes
- Unquoted object keys
- Line-delimited JSON (NDJSON format)
- Trailing commas
Example:
// Works even with malformed JSON like {'name': 'Alice', 'age': 30}
result, err := tinysql.FuzzyImportJSON(ctx, db, "default", "users", file, nil)
Parameters are the same as ImportJSON.
func ImportCSV ¶ added in v0.3.0
func ImportCSV(ctx context.Context, db *DB, tenant, tableName string, src io.Reader, opts *ImportOptions) (*ImportResult, error)
ImportCSV imports CSV/TSV data from a reader into a table. Use this for streaming imports or when you already have an io.Reader.
Example:
f, _ := os.Open("data.csv")
defer f.Close()
result, err := tinysql.ImportCSV(ctx, db, "default", "users", f, &tinysql.ImportOptions{
HeaderMode: "auto",
DelimiterCandidates: []rune{',', ';', '\t'},
})
Parameters:
- ctx: Context for cancellation
- db: Target database instance
- tenant: Tenant/schema name
- tableName: Target table name
- src: Input reader (file, network stream, stdin, etc.)
- opts: Optional configuration (nil uses defaults)
Returns ImportResult with metadata and any error encountered.
Example ¶
ExampleImportCSV demonstrates importing a CSV file into tinySQL.
ctx := context.Background()
db := tinysql.NewDB()
// Sample CSV data
csvData := `id,name,age,active
1,Alice,30,true
2,Bob,25,false
3,Charlie,35,true`
// Import CSV data
result, err := tinysql.ImportCSV(ctx, db, "default", "users",
strings.NewReader(csvData), &tinysql.ImportOptions{
CreateTable: true,
TypeInference: true,
HeaderMode: "present",
})
if err != nil {
fmt.Printf("Error: %v\n", err)
return
}
fmt.Printf("Imported %d rows into table with columns: %v\n",
result.RowsInserted, result.ColumnNames)
fmt.Printf("Detected types: %v\n", result.ColumnTypes)
// Query the imported data
stmt, _ := tinysql.ParseSQL("SELECT name, age FROM users WHERE active = true ORDER BY age")
rs, _ := tinysql.Execute(ctx, db, "default", stmt)
for _, row := range rs.Rows {
fmt.Printf("Name: %v, Age: %v\n", row["name"], row["age"])
}
Output:

Imported 3 rows into table with columns: [id name age active]
Detected types: [INT TEXT INT BOOL]
Name: Alice, Age: 30
Name: Charlie, Age: 35
Example (Compressed) ¶
ExampleImportCSV_compressed demonstrates importing gzip-compressed CSV files.
ctx := context.Background()
db := tinysql.NewDB()
// In real usage, you would use a .csv.gz file
// This example shows the API for when you have compressed data
csvData := `id,value
1,100
2,200
3,300`
// Import (gzip detection is automatic)
result, _ := tinysql.ImportCSV(ctx, db, "default", "data",
strings.NewReader(csvData), nil)
fmt.Printf("Imported %d rows with encoding: %s\n",
result.RowsInserted, result.Encoding)
Output:

Imported 3 rows with encoding: utf-8
func ImportFile ¶ added in v0.3.0
func ImportFile(ctx context.Context, db *DB, tenant, tableName, filePath string, opts *ImportOptions) (*ImportResult, error)
ImportFile imports a structured data file (CSV, TSV, JSON, XML) into a table. The format is auto-detected from the file extension or content.
Supported formats:
- CSV (.csv) - Comma-separated values with auto-detected delimiters
- TSV (.tsv, .tab) - Tab-separated values
- JSON (.json) - Array of objects format: [{"id": 1, "name": "Alice"}, ...]
- XML (.xml) - Simple row-based XML (limited support)
- Compressed (.gz) - Transparent gzip decompression
Example:
db := tinysql.NewDB()
result, err := tinysql.ImportFile(ctx, db, "default", "users", "data.csv", &tinysql.ImportOptions{
CreateTable: true,
TypeInference: true,
BatchSize: 1000,
})
fmt.Printf("Imported %d rows\n", result.RowsInserted)
Parameters:
- ctx: Context for cancellation
- db: Target database instance
- tenant: Tenant/schema name (use "default" for single-tenant mode)
- tableName: Target table name (if empty, derived from filename)
- filePath: Path to the file to import
- opts: Optional configuration (nil uses sensible defaults)
Returns ImportResult with metadata and any error encountered.
Example ¶
ExampleImportFile demonstrates importing a file with format auto-detection.
ctx := context.Background()
db := tinysql.NewDB()
// Create a temporary CSV file
tmpFile, _ := os.CreateTemp("", "example-*.csv")
defer os.Remove(tmpFile.Name())
csvContent := `date,temperature,humidity
2024-01-01,72.5,45
2024-01-02,73.2,48
2024-01-03,71.8,52`
tmpFile.WriteString(csvContent)
tmpFile.Close()
// Import the file (format auto-detected from extension)
result, err := tinysql.ImportFile(ctx, db, "default", "weather",
tmpFile.Name(), nil) // nil uses default options
if err != nil {
fmt.Printf("Error: %v\n", err)
return
}
fmt.Printf("Imported %d rows\n", result.RowsInserted)
fmt.Printf("Delimiter: %q, Header: %v\n", result.Delimiter, result.HadHeader)
// Query with aggregation
stmt, _ := tinysql.ParseSQL("SELECT COUNT(*) as row_count, AVG(temperature) as avg_temp FROM weather")
rs, _ := tinysql.Execute(ctx, db, "default", stmt)
if len(rs.Rows) > 0 {
fmt.Printf("Count: %v rows, Average temperature: %.2f°F\n",
rs.Rows[0]["row_count"], rs.Rows[0]["avg_temp"])
}
Output:

Imported 3 rows
Delimiter: ',', Header: true
Count: 3 rows, Average temperature: 72.50°F
func ImportJSON ¶ added in v0.3.0
func ImportJSON(ctx context.Context, db *DB, tenant, tableName string, src io.Reader, opts *ImportOptions) (*ImportResult, error)
ImportJSON imports JSON data from a reader into a table. Supports array of objects format: [{"id": 1, "name": "Alice"}, ...]
Example:
jsonData := `[{"id": 1, "name": "Alice"}, {"id": 2, "name": "Bob"}]`
result, err := tinysql.ImportJSON(ctx, db, "default", "users",
strings.NewReader(jsonData), nil)
Parameters:
- ctx: Context for cancellation
- db: Target database instance
- tenant: Tenant/schema name
- tableName: Target table name
- src: Input reader
- opts: Optional configuration
Returns ImportResult with metadata and any error encountered.
Example ¶
ExampleImportJSON demonstrates importing JSON data into tinySQL.
ctx := context.Background()
db := tinysql.NewDB()
// Sample JSON data
jsonData := `[
{"id": 1, "product": "Laptop", "price": 999.99, "in_stock": true},
{"id": 2, "product": "Mouse", "price": 29.99, "in_stock": true},
{"id": 3, "product": "Keyboard", "price": 79.99, "in_stock": false}
]`
// Import JSON data
result, err := tinysql.ImportJSON(ctx, db, "default", "products",
strings.NewReader(jsonData), &tinysql.ImportOptions{
CreateTable: true,
TypeInference: true,
})
if err != nil {
fmt.Printf("Error: %v\n", err)
return
}
fmt.Printf("Imported %d rows\n", result.RowsInserted)
// Query the imported data
stmt, _ := tinysql.ParseSQL("SELECT product, price FROM products WHERE in_stock = true")
rs, _ := tinysql.Execute(ctx, db, "default", stmt)
for _, row := range rs.Rows {
fmt.Printf("%v: $%v\n", row["product"], row["price"])
}
Output:

Imported 3 rows
Laptop: $999.99
Mouse: $29.99
type InsertBuilder ¶ added in v0.3.0
type InsertBuilder struct {
// contains filtered or unexported fields
}
InsertBuilder provides a fluent interface for building INSERT statements.
func InsertInto ¶ added in v0.3.0
func InsertInto(table string) *InsertBuilder
InsertInto creates a new INSERT builder.
func (*InsertBuilder) Build ¶ added in v0.3.0
func (ib *InsertBuilder) Build() *engine.Insert
Build creates the INSERT statement.
func (*InsertBuilder) Columns ¶ added in v0.3.0
func (ib *InsertBuilder) Columns(cols ...string) *InsertBuilder
Columns specifies the columns for the insert.
func (*InsertBuilder) Values ¶ added in v0.3.0
func (ib *InsertBuilder) Values(values ...ExprBuilder) *InsertBuilder
Values specifies the values to insert.
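A minimal sketch that mirrors the INSERTs from the package example, using the Val helper for literals:

ins := tinysql.InsertInto("users").
	Columns("id", "name", "active", "score").
	Values(Val(4), Val("Dave"), Val(true), Val(20))
stmt := ins.Build() // *engine.Insert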
type IsolationLevel ¶ added in v0.2.1
type IsolationLevel = storage.IsolationLevel
IsolationLevel defines transaction isolation semantics.
const (
	ReadCommitted     IsolationLevel = storage.ReadCommitted     // Read only committed data
	RepeatableRead    IsolationLevel = storage.RepeatableRead    // Repeatable reads within transaction
	SnapshotIsolation IsolationLevel = storage.SnapshotIsolation // Full snapshot isolation
	Serializable      IsolationLevel = storage.Serializable      // Serializable transactions with conflict detection
)
Isolation level constants
type MVCCManager ¶ added in v0.2.1
type MVCCManager = storage.MVCCManager
MVCCManager coordinates multi-version concurrency control with snapshot isolation. Provides transaction begin/commit/abort operations and visibility checking.
type Parser ¶ added in v0.2.1
Parser parses SQL text into executable Statement objects. Create with NewParser and call ParseStatement() to parse.
func NewParser ¶
NewParser creates a new SQL parser for the provided input string.
The parser supports a practical subset of SQL including:
- DDL: CREATE TABLE, DROP TABLE
- DML: INSERT, UPDATE, DELETE
- Queries: SELECT with WHERE, JOIN, GROUP BY, HAVING, ORDER BY, LIMIT, OFFSET
- Set operations: UNION, INTERSECT, EXCEPT
- CTEs: WITH clauses
- Expressions: arithmetic, comparisons, functions, aggregates
Example:
parser := tinysql.NewParser("SELECT * FROM users WHERE active = true")
stmt, err := parser.ParseStatement()
if err != nil {
log.Fatal(err)
}
For better performance with repeated queries, consider using QueryCache.
type QueryCache ¶ added in v0.2.1
type QueryCache = engine.QueryCache
QueryCache stores compiled queries for reuse, similar to prepared statements. Create with NewQueryCache and use Compile() to cache queries.
func NewQueryCache ¶
func NewQueryCache(maxSize int) *QueryCache
NewQueryCache creates a new query cache for compiling and reusing parsed queries.
Query compilation parses SQL once and reuses the AST for multiple executions, similar to prepared statements. This improves performance for repeated queries.
The maxSize parameter limits the number of cached queries (LRU eviction). Use 0 for unlimited cache size (not recommended for production).
Example:
cache := tinysql.NewQueryCache(100)
query, _ := cache.Compile("SELECT * FROM users WHERE active = true")
// Execute the compiled query repeatedly without re-parsing
for i := 0; i < 3; i++ {
	rs, _ := query.Execute(ctx, db, "default")
	_ = rs // process results...
}
type ResultSet ¶ added in v0.2.1
ResultSet holds query results with column names and data rows. Returned by SELECT queries and available for inspection.
func Execute ¶
Execute executes a parsed SQL statement against the database.
The context allows for cancellation and timeout control. The tenant parameter provides data isolation - each tenant has its own namespace of tables.
For DDL statements (CREATE TABLE, DROP TABLE) and for INSERT, Execute returns a nil ResultSet. UPDATE and DELETE return a one-row ResultSet whose "updated" or "deleted" column holds the number of affected rows, as shown in the package example above. SELECT queries return a ResultSet with columns and rows.
Example:
ctx := context.Background()
stmt, _ := tinysql.ParseSQL("SELECT * FROM users WHERE age > 18")
rs, err := tinysql.Execute(ctx, db, "default", stmt)
if err != nil {
log.Fatal(err)
}
// Process results
for _, row := range rs.Rows {
name, _ := tinysql.GetVal(row, "name")
fmt.Println(name)
}
The tenant parameter is required. Use "default" for single-tenant applications.
func ExecuteCompiled ¶
func ExecuteCompiled(ctx context.Context, db *DB, tenant string, compiled *CompiledQuery) (*ResultSet, error)
ExecuteCompiled executes a pre-compiled query against the database.
This is more efficient than Execute for queries executed repeatedly, as parsing is done only once during compilation.
Example:
cache := tinysql.NewQueryCache(100)
query, _ := cache.Compile("SELECT * FROM users")
rs, err := tinysql.ExecuteCompiled(ctx, db, "default", query)
Returns ResultSet for SELECT queries, nil for DDL/DML statements.
type Row ¶ added in v0.2.1
Row represents a single result row mapped by column name (case-insensitive). Keys include both qualified (table.column) and unqualified (column) names.
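For example, after a join both the qualified and the unqualified key refer to the same value; a sketch assuming rs holds the result of a query such as SELECT users.id, orders.total FROM users JOIN orders ...:

for _, row := range rs.Rows {
	qualified, _ := tinysql.GetVal(row, "users.id")
	short, _ := tinysql.GetVal(row, "id")
	fmt.Println(qualified, short) // same value under both keys
}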
type SelectBuilder ¶ added in v0.3.0
type SelectBuilder struct {
// contains filtered or unexported fields
}
SelectBuilder provides a fluent interface for building SELECT queries.
func Select ¶ added in v0.3.0
func Select(projections ...ExprBuilder) *SelectBuilder
Select creates a new SELECT query builder with the specified projections.
Example:
q := tinysql.Select(Col("id"), Col("name")).
From("users").
Where(Gt(Col("age"), Val(18)))
func SelectDistinct ¶ added in v0.3.0
func SelectDistinct(projections ...ExprBuilder) *SelectBuilder
SelectDistinct creates a SELECT DISTINCT query.
func SelectStar ¶ added in v0.3.0
func SelectStar() *SelectBuilder
SelectStar creates a SELECT * query.
func With ¶ added in v0.3.0
func With(ctes ...WithQuery) *SelectBuilder
With adds Common Table Expressions (WITH clause) to a query.
Example:
top := tinysql.WithQuery{
Name: "top_users",
Query: tinysql.Select(Col("id"), Col("score")).
From("users").
Where(Gt(Col("score"), Val(100))),
}
q := tinysql.With(top).
Select(Col("id"), Col("amount")).
From("orders")
func (*SelectBuilder) Build ¶ added in v0.3.0
func (sb *SelectBuilder) Build() *engine.Select
Build converts the builder to an engine.Select statement.
func (*SelectBuilder) From ¶ added in v0.3.0
func (sb *SelectBuilder) From(table string) *SelectBuilder
From specifies the FROM table.
func (*SelectBuilder) FromAs ¶ added in v0.3.0
func (sb *SelectBuilder) FromAs(table, alias string) *SelectBuilder
FromAs specifies the FROM table with an alias.
func (*SelectBuilder) GroupBy ¶ added in v0.3.0
func (sb *SelectBuilder) GroupBy(columns ...string) *SelectBuilder
GroupBy adds GROUP BY columns.
func (*SelectBuilder) GroupByExpr ¶ added in v0.3.0
func (sb *SelectBuilder) GroupByExpr(exprs ...ExprBuilder) *SelectBuilder
GroupByExpr adds GROUP BY expressions.
func (*SelectBuilder) Having ¶ added in v0.3.0
func (sb *SelectBuilder) Having(condition ExprBuilder) *SelectBuilder
Having adds a HAVING condition.
func (*SelectBuilder) Join ¶ added in v0.3.0
func (sb *SelectBuilder) Join(table string, on ExprBuilder) *SelectBuilder
Join adds an INNER JOIN clause.
func (*SelectBuilder) JoinAs ¶ added in v0.3.0
func (sb *SelectBuilder) JoinAs(table, alias string, on ExprBuilder) *SelectBuilder
JoinAs adds an INNER JOIN with an alias.
func (*SelectBuilder) LeftJoin ¶ added in v0.3.0
func (sb *SelectBuilder) LeftJoin(table string, on ExprBuilder) *SelectBuilder
LeftJoin adds a LEFT OUTER JOIN clause.
func (*SelectBuilder) LeftJoinAs ¶ added in v0.3.0
func (sb *SelectBuilder) LeftJoinAs(table, alias string, on ExprBuilder) *SelectBuilder
LeftJoinAs adds a LEFT OUTER JOIN with an alias.
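A sketch combining the join methods with grouping, ordering, and limits; the table and column names are illustrative:

q := tinysql.Select(Col("u.name"), Sum(Col("o.amount"))).
	FromAs("users", "u").
	LeftJoinAs("orders", "o", Eq(Col("o.user_id"), Col("u.id"))).
	GroupBy("u.name").
	Having(Gt(Sum(Col("o.amount")), Val(100))).
	OrderByDesc("u.name").
	Limit(10)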
func (*SelectBuilder) Limit ¶ added in v0.3.0
func (sb *SelectBuilder) Limit(n int) *SelectBuilder
Limit sets the LIMIT clause.
func (*SelectBuilder) Offset ¶ added in v0.3.0
func (sb *SelectBuilder) Offset(n int) *SelectBuilder
Offset sets the OFFSET clause.
func (*SelectBuilder) OrderBy ¶ added in v0.3.0
func (sb *SelectBuilder) OrderBy(column string) *SelectBuilder
OrderBy adds an ORDER BY clause (ascending).
func (*SelectBuilder) OrderByDesc ¶ added in v0.3.0
func (sb *SelectBuilder) OrderByDesc(column string) *SelectBuilder
OrderByDesc adds an ORDER BY clause (descending).
func (*SelectBuilder) Where ¶ added in v0.3.0
func (sb *SelectBuilder) Where(condition ExprBuilder) *SelectBuilder
Where adds a WHERE condition.
type Statement ¶ added in v0.2.1
Statement is the base interface for all parsed SQL statements. Use Parser.ParseStatement() to obtain a Statement from SQL text.
func MustParseSQL ¶ added in v0.2.1
MustParseSQL is like ParseSQL but panics if parsing fails. Useful for static SQL in tests or initialization code.
Example:
stmt := tinysql.MustParseSQL("CREATE TABLE users (id INT, name TEXT)")
func ParseSQL ¶ added in v0.2.1
ParseSQL is a convenience function that creates a parser and parses a SQL statement in one call. Equivalent to NewParser(sql).ParseStatement().
Example:
stmt, err := tinysql.ParseSQL("SELECT id, name FROM users")
if err != nil {
log.Fatal(err)
}
Returns the parsed Statement or an error if parsing fails.
type Table ¶ added in v0.2.1
Table represents a database table with columns and rows. Tables are created via CREATE TABLE statements and accessed through the DB.
func NewTable ¶ added in v0.2.1
NewTable creates a new table with the specified columns.
This is a low-level API. Normally tables are created via CREATE TABLE statements. Use this when programmatically building table structures.
Example:
cols := []tinysql.Column{
{Name: "id", Type: tinysql.IntType},
{Name: "name", Type: tinysql.TextType},
}
table := tinysql.NewTable("users", cols, false)
db.Put("default", table)
The isTemp parameter indicates if this is a temporary table (not persisted).
type TableBuilder ¶ added in v0.3.0
type TableBuilder struct {
// contains filtered or unexported fields
}
TableBuilder provides a fluent interface for defining tables.
func NewTableBuilder ¶ added in v0.3.0
func NewTableBuilder(name string) *TableBuilder
NewTableBuilder creates a new table builder for programmatic schema definition.
func (*TableBuilder) Bool ¶ added in v0.3.0
func (tb *TableBuilder) Bool(name string) *TableBuilder
Bool adds a BOOL column.
func (*TableBuilder) Build ¶ added in v0.3.0
func (tb *TableBuilder) Build() *engine.CreateTable
Build creates the table definition.
func (*TableBuilder) Column ¶ added in v0.3.0
func (tb *TableBuilder) Column(name string, colType ColType) *TableBuilder
Column adds a column to the table.
func (*TableBuilder) Create ¶ added in v0.3.0
func (tb *TableBuilder) Create(db *DB, tenant string) error
Create executes the CREATE TABLE statement.
func (*TableBuilder) Float ¶ added in v0.3.0
func (tb *TableBuilder) Float(name string) *TableBuilder
Float adds a FLOAT column.
func (*TableBuilder) Int ¶ added in v0.3.0
func (tb *TableBuilder) Int(name string) *TableBuilder
Int adds an INT column.
func (*TableBuilder) JSON ¶ added in v0.3.0
func (tb *TableBuilder) JSON(name string) *TableBuilder
JSON adds a JSON column.
func (*TableBuilder) Temp ¶ added in v0.3.0
func (tb *TableBuilder) Temp() *TableBuilder
Temp marks the table as temporary.
func (*TableBuilder) Text ¶ added in v0.3.0
func (tb *TableBuilder) Text(name string) *TableBuilder
Text adds a TEXT column.
func (*TableBuilder) Timestamp ¶ added in v0.3.0
func (tb *TableBuilder) Timestamp(name string) *TableBuilder
Timestamp adds a TIMESTAMP column.
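Putting the builder together: a minimal sketch that defines and creates a temporary table with the typed column helpers, assuming db was created with NewDB:

err := tinysql.NewTableBuilder("session_cache").
	Int("id").
	Text("token").
	Bool("valid").
	Timestamp("expires_at").
	Temp().
	Create(db, "default")
if err != nil {
	log.Fatal(err)
}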
type Timestamp ¶ added in v0.2.1
Timestamp represents a logical timestamp for MVCC visibility checks.
type TxContext ¶ added in v0.2.1
TxContext holds the state of an active transaction including read/write sets and isolation level.
type UpdateBuilder ¶ added in v0.3.0
type UpdateBuilder struct {
// contains filtered or unexported fields
}
UpdateBuilder provides a fluent interface for building UPDATE statements.
func Update ¶ added in v0.3.0
func Update(table string) *UpdateBuilder
Update creates a new UPDATE builder.
func (*UpdateBuilder) Build ¶ added in v0.3.0
func (ub *UpdateBuilder) Build() *engine.Update
Build creates the UPDATE statement.
func (*UpdateBuilder) Set ¶ added in v0.3.0
func (ub *UpdateBuilder) Set(column string, value ExprBuilder) *UpdateBuilder
Set adds a column assignment.
func (*UpdateBuilder) Where ¶ added in v0.3.0
func (ub *UpdateBuilder) Where(condition ExprBuilder) *UpdateBuilder
Where adds a WHERE condition.
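A short sketch mirroring the UPDATE from the package example:

upd := tinysql.Update("users").
	Set("score", Val(50)).
	Where(Eq(Col("name"), Val("Bob")))
stmt := upd.Build() // *engine.Update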
type WALOperationType ¶ added in v0.2.1
type WALOperationType = storage.WALOperationType
WALOperationType defines the type of WAL operation.
type WithQuery ¶ added in v0.3.0
type WithQuery struct {
Name string
Query *SelectBuilder
}
WithQuery represents a Common Table Expression (CTE).
Directories ¶

| Path | Synopsis |
|---|---|
| bindings | |
| bindings/python (command) | |
| cmd | |
| cmd/catalog_demo (command) | |
| cmd/debug (command) | |
| cmd/demo (command) | |
| cmd/repl (command) | |
| cmd/server (command) | |
| cmd/sqltools (command) | |
| cmd/studio (command) | |
| cmd/tinysql (command) | |
| cmd/tinysqlpage (command) | |
| cmd/wasm_browser (command) | |
| cmd/wasm_browser/tools (command) | |
| cmd/wasm_node (command) | |
| internal | |
| internal/driver | Package driver implements a database/sql driver for tinySQL. |
| internal/engine | Package engine provides SQL parsing, planning, and execution for tinySQL. |
| internal/importer | Package importer provides automatic file import with type detection for tinySQL. |
| internal/storage | Package storage provides the durable data structures for tinySQL. |