refactor: return errors with stack traces, where appropriate
1 file changed, 33 insertions(+), 19 deletions(-)
changed files
M internal/storage/sqlite/writer.go → internal/storage/sqlite/writer.go
@@ -40,8 +40,8 @@ type Options struct {
 	Compress bool
 }
 
-func OpenDB(dbPath string) (*sql.DB, error) {
-	return sql.Open(
+func OpenDB(dbPath string) (*sql.DB, errors.E) {
+	db, err := sql.Open(
 		"sqlite",
 		fmt.Sprintf(
 			"file:%s?mode=%s&_pragma=foreign_keys(1)&_pragma=mmap_size(%d)",
@@ -50,9 +50,14 @@
 			"rwc",
 			16*1024*1024,
 		),
 	)
+	if err != nil {
+		return nil, errors.WithStack(err)
+	}
+
+	return db, nil
 }
 
-func NewWriter(db *sql.DB, logger *log.Logger, opts *Options) (*Writer, error) {
+func NewWriter(db *sql.DB, logger *log.Logger, opts *Options) (*Writer, errors.E) {
 	_, err := db.Exec(`
 		CREATE TABLE IF NOT EXISTS url (
 			url_id INTEGER PRIMARY KEY,
@@ -118,20 +123,25 @@
 	return w, nil
 }
 
-func (s *Writer) Mkdirp(string) error {
+func (s *Writer) Mkdirp(string) errors.E {
 	return nil
 }
 
-func (s *Writer) storeURL(path string) (int64, error) {
+func (s *Writer) storeURL(path string) (int64, errors.E) {
 	r, err := s.queries.insertURL.Exec(path)
 	if err != nil {
 		return 0, errors.WithMessagef(err, "inserting URL %s into database", path)
 	}
-	return r.LastInsertId()
+	id, err := r.LastInsertId()
+	if err != nil {
+		return 0, errors.WithStack(err)
+	}
+
+	return id, nil
 }
 
-func (s *Writer) storeFile(urlID int64, file *storage.File) (int64, error) {
+func (s *Writer) storeFile(urlID int64, file *storage.File) (int64, errors.E) {
 	if file.ContentType == "" {
 		file.ContentType = http.DetectContentType(file.Encodings["identity"].Bytes())
 		s.log.Warn(
@@ -154,10 +164,15 @@
 	if err != nil {
 		return 0, errors.WithMessage(err, "inserting file into database")
 	}
-	return r.LastInsertId()
+	id, err := r.LastInsertId()
+	if err != nil {
+		return 0, errors.WithStack(err)
+	}
+
+	return id, nil
 }
 
-func (s *Writer) storeEncoding(fileID int64, encoding string, data []byte) error {
+func (s *Writer) storeEncoding(fileID int64, encoding string, data []byte) errors.E {
 	_, err := s.queries.insertContent.Exec(
 		sql.Named("file_id", fileID),
 		sql.Named("encoding", encoding),
@@ -175,7 +190,7 @@
 	return nil
 }
 
-func etag(content []byte) (string, error) {
+func etag(content []byte) (string, errors.E) {
 	hash := fnv.New64a()
 	hash.Write(content)
@@ -197,13 +212,13 @@
 	return file
 }
 
-func (s *Writer) WritePost(post *content.Post, content *buffer.Buffer) error {
+func (s *Writer) WritePost(post *content.Post, content *buffer.Buffer) errors.E {
 	s.log.Debug("storing post", "title", post.Title)
 
 	return s.WriteFile(s.NewFileFromPost(post), content)
 }
 
-func (s *Writer) Write(pathname string, title string, content *buffer.Buffer) error {
+func (s *Writer) Write(pathname string, title string, content *buffer.Buffer) errors.E {
 	file := &storage.File{
 		Title: title,
 		Path:  pathname,
@@ -214,7 +229,7 @@
 	return s.WriteFile(file, content)
 }
 
-func (s *Writer) WriteFile(file *storage.File, content *buffer.Buffer) error {
+func (s *Writer) WriteFile(file *storage.File, content *buffer.Buffer) errors.E {
 	s.log.Debug("storing content", "pathname", file.Path)
 
 	urlID, err := s.storeURL(file.Path)
@@ -271,15 +286,16 @@
 	return nil
 }
 
-func compress(encoding string, content *buffer.Buffer) (compressed *buffer.Buffer, err error) {
+func compress(encoding string, content *buffer.Buffer) (*buffer.Buffer, errors.E) {
 	var w io.WriteCloser
-	compressed = new(buffer.Buffer)
+	compressed := new(buffer.Buffer)
 	switch encoding {
 	case "gzip":
 		w = gzip.NewWriter(compressed)
 	case "br":
 		w = brotli.NewWriter(compressed)
 	case "zstd":
+		var err error
 		w, err = zstd.NewWriter(compressed)
 		if err != nil {
 			return nil, errors.WithMessage(err, "could not create zstd writer")
@@ -287,12 +303,10 @@
 		}
 	}
 
 	defer w.Close()
-	err = content.SeekStart()
-	if err != nil {
+	if err := content.SeekStart(); err != nil {
 		return nil, errors.WithMessage(err, "seeking to start of content buffer")
 	}
-	_, err = io.Copy(w, content)
-	if err != nil {
+	if _, err := io.Copy(w, content); err != nil {
 		return nil, errors.WithMessage(err, "compressing file")
 	}
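The pattern is the same at every boundary in the diff: where a plain error enters the package (sql.Open, Result.LastInsertId, zstd.NewWriter, and so on), it is either annotated with errors.WithMessage/WithMessagef or wrapped with errors.WithStack, and the signature changes from error to errors.E so callers can rely on a stack trace being attached. The sketch below shows that pattern in isolation. It assumes the errors package is gitlab.com/tozd/go/errors (the library that defines errors.E and these helpers); openDB and the path string are hypothetical stand-ins, not code from this repository.

package main

import (
	"database/sql"
	"fmt"

	"gitlab.com/tozd/go/errors"
)

// openDB mirrors the refactored OpenDB above: instead of handing the raw
// error from sql.Open back to the caller, it wraps it so the point of
// failure is captured in a stack trace.
func openDB(dbPath string) (*sql.DB, errors.E) {
	db, err := sql.Open("sqlite", dbPath)
	if err != nil {
		// WithStack records the current call stack on the error.
		return nil, errors.WithStack(err)
	}

	return db, nil
}

func main() {
	// With no "sqlite" driver registered, sql.Open fails, which is enough
	// to demonstrate the wrapping; the path is purely illustrative.
	if _, err := openDB("example.db"); err != nil {
		// %+v prints the message together with the recorded stack trace.
		fmt.Printf("%+v\n", err)
	}
}

Returning errors.E rather than error makes the guarantee visible in the signature: any non-nil value leaving these functions already carries a stack trace, so call sites only add context messages instead of wrapping again.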