This article collects typical usage examples of the Writer.Write method from Go's compress/gzip package. If you are wondering what Writer.Write does, how to call it, or what real-world uses look like, the curated examples here should help; they are also a good starting point for exploring the compress/gzip.Writer type more generally.
Six code examples of Writer.Write are shown below, ordered roughly by popularity.
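Before the collected project examples, here is a minimal, self-contained sketch of the basic pattern they all share: wrap an io.Writer (a file, buffer, network connection, or HTTP response) in a gzip.Writer, push data through Write, and call Close to flush the remaining compressed data and write the gzip footer. The output file name below is only a placeholder for illustration.

package main

import (
	"compress/gzip"
	"log"
	"os"
)

func main() {
	// Create the destination file; "example.txt.gz" is just an illustrative name.
	f, err := os.Create("example.txt.gz")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// Everything written to zw is compressed and forwarded to f.
	zw := gzip.NewWriter(f)
	if _, err := zw.Write([]byte("hello, gzip\n")); err != nil {
		log.Fatal(err)
	}

	// Close flushes buffered data and writes the gzip footer;
	// skipping it leaves a truncated archive.
	if err := zw.Close(); err != nil {
		log.Fatal(err)
	}
}

The project examples below apply this same pattern to files, in-memory buffers, WebSocket frames, and HTTP responses.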
Example 1: NewFileWriter
func (nf *Netflow) NewFileWriter(filename string, mode int, compress bool) error {
	log.Debugln("NewFileWriter")
	if _, ok := nf.writers[filename]; ok {
		return fmt.Errorf("netflow writer %v already exists", filename)
	}

	f, err := os.Create(filename)
	if err != nil {
		return err
	}

	c := make(chan *Packet, BUFFER_DEPTH)

	// Writer goroutine: drains the channel and writes each packet to the file,
	// optionally through a gzip.Writer.
	go func() {
		var w *gzip.Writer
		if compress {
			log.Debugln("using compression")
			w = gzip.NewWriter(f)
		}
		for {
			d := <-c
			if d == nil { // a nil packet signals shutdown
				break
			}
			if mode == ASCII {
				if compress {
					w.Write([]byte(d.GoString()))
				} else {
					f.Write([]byte(d.GoString()))
				}
			} else {
				if compress {
					w.Write(d.Raw)
				} else {
					f.Write(d.Raw)
				}
			}
		}
		if compress {
			w.Close()
		}
		f.Close()
	}()

	nf.registerWriter(filename, c)
	return nil
}
Example 2: getPubmedRecords
func getPubmedRecords(urlFetcher *gopubmed.Fetcher, first bool, meshWriter *gzip.Writer, xmlWriter *gzip.Writer, transport *http.Transport, pmids []string) {
	preUrlTime := time.Now()
	articles, raw, err := urlFetcher.GetArticlesAndRaw(pmids)
	if err != nil {
		log.Fatal(err)
	}
	s := string(raw[:len(raw)])
	for i := 0; i < len(articles); i++ {
		pubmedArticle := articles[i]
		if pubmedArticle.MedlineCitation != nil && pubmedArticle.MedlineCitation.MeshHeadingList != nil && pubmedArticle.MedlineCitation.MeshHeadingList.MeshHeading != nil {
			fmt.Fprint(meshWriter, articles[i].MedlineCitation.PMID.Text)
			for j := 0; j < len(pubmedArticle.MedlineCitation.MeshHeadingList.MeshHeading); j++ {
				fmt.Fprint(meshWriter, "|")
				fmt.Fprint(meshWriter, pubmedArticle.MedlineCitation.MeshHeadingList.MeshHeading[j].DescriptorName.Attr_UI)
				fmt.Fprint(meshWriter, "::"+pubmedArticle.MedlineCitation.MeshHeadingList.MeshHeading[j].DescriptorName.Text)
				if len(pubmedArticle.MedlineCitation.MeshHeadingList.MeshHeading[j].QualifierName) > 0 {
					fmt.Fprint(meshWriter, "=")
					for q := 0; q < len(pubmedArticle.MedlineCitation.MeshHeadingList.MeshHeading[j].QualifierName); q++ {
						if q != 0 {
							fmt.Fprint(meshWriter, "&")
						}
						fmt.Fprint(meshWriter, pubmedArticle.MedlineCitation.MeshHeadingList.MeshHeading[j].QualifierName[q].Attr_UI)
						fmt.Fprint(meshWriter, "::"+pubmedArticle.MedlineCitation.MeshHeadingList.MeshHeading[j].QualifierName[q].Text)
					}
				}
			}
			fmt.Fprintln(meshWriter, "")
		}
	}
	meshWriter.Flush()
	if !first {
		s = strings.Replace(s, startXml, "", -1)
		s = strings.Replace(s, docType, "", -1)
		s = strings.Replace(s, startPubmedArticleSet, "", -1)
	}
	s = strings.Replace(s, endPubmedArticleSet, "<!-- breakset -->", -1)
	xmlWriter.Write([]byte(s))
	postUrlTime := time.Now()
	log.Println("Total request time:", postUrlTime.Sub(preUrlTime))
}
Example 3: gzip
// gzip pops the compression level and the payload from the engine's stack
// and pushes back the compressed bytes.
func (e *Engine) gzip() error {
	var buf bytes.Buffer
	var level int
	var err error

	level, err = e.stack.PopInt()
	if err == nil {
		var w *gzip.Writer
		w, err = gzip.NewWriterLevel(&buf, level)
		if err == nil {
			_, err = w.Write(e.stack.Pop())
			w.Close()
		}
	}
	if err == nil {
		e.stack.Push(buf.Bytes())
	}
	return err
}
Example 4: sendContinuousChangesByWebSocket
func (h *handler) sendContinuousChangesByWebSocket(inChannels base.Set, options db.ChangesOptions) (error, bool) {
	forceClose := false
	handler := func(conn *websocket.Conn) {
		h.logStatus(101, "Upgraded to WebSocket protocol")
		defer func() {
			conn.Close()
			base.LogTo("HTTP+", "#%03d: --> WebSocket closed", h.serialNumber)
		}()

		// Read changes-feed options from an initial incoming WebSocket message in JSON format:
		var compress bool
		if msg, err := readWebSocketMessage(conn); err != nil {
			return
		} else {
			var channelNames []string
			var err error
			if _, options, _, channelNames, _, compress, err = h.readChangesOptionsFromJSON(msg); err != nil {
				return
			}
			if channelNames != nil {
				inChannels, _ = channels.SetFromArray(channelNames, channels.ExpandStar)
			}
		}

		// Set up GZip compression
		var writer *bytes.Buffer
		var zipWriter *gzip.Writer
		if compress {
			writer = bytes.NewBuffer(nil)
			zipWriter = GetGZipWriter(writer)
		}

		caughtUp := false
		_, forceClose = h.generateContinuousChanges(inChannels, options, func(changes []*db.ChangeEntry) error {
			var data []byte
			if changes != nil {
				data, _ = json.Marshal(changes)
			} else if !caughtUp {
				caughtUp = true
				data, _ = json.Marshal([]*db.ChangeEntry{})
			} else {
				data = []byte{}
			}

			if compress && len(data) > 8 {
				// Compress JSON, using same GZip context, and send as binary msg:
				zipWriter.Write(data)
				zipWriter.Flush()
				data = writer.Bytes()
				writer.Reset()
				conn.PayloadType = websocket.BinaryFrame
			} else {
				conn.PayloadType = websocket.TextFrame
			}

			_, err := conn.Write(data)
			return err
		})

		if zipWriter != nil {
			ReturnGZipWriter(zipWriter)
		}
	}

	server := websocket.Server{
		Handshake: func(*websocket.Config, *http.Request) error { return nil },
		Handler:   handler,
	}
	server.ServeHTTP(h.response, h.rq)
	return nil, forceClose
}
Example 5: processText
func (p Handler) processText(s *Session, w http.ResponseWriter, resp *http.Response) (err error) {
	var (
		zr      *gzip.Reader
		zw      *gzip.Writer
		body    []byte
		gzipped bool   = resp.Header.Get("Content-Encoding") == "gzip"
		reqHost string = resp.Request.URL.Host
		reqPath string = resp.Request.URL.Path
	)
	if resp.ContentLength != 0 && resp.Request.Method != "HEAD" {
		if gzipped {
			zr, err = gzip.NewReader(resp.Body)
			if err == nil {
				body, err = ioutil.ReadAll(zr)
				if !consumeError(&err) {
					return dumpError(err)
				}
			}
		} else {
			body, err = ioutil.ReadAll(resp.Body)
			if !consumeError(&err) {
				return dumpError(err)
			}
		}
	}

	w.Header().Del("Content-Length")
	w.Header().Set("Content-Encoding", "gzip")
	w.WriteHeader(resp.StatusCode)
	if len(body) <= 0 {
		return
	}

	var (
		rules           []ReRule
		bodyExtraHeader string
	)
	switch p {
	case HD_html:
		rules = reRules.Html
	case HD_javascript:
		rules = reRules.Js
	case HD_json:
		rules = reRules.Json
	case HD_css:
		rules = reRules.Css
	}

	if log.V(5) {
		log.Infof("Original entity %s\n%s", reqPath, string(body))
	}

	if s.abusing {
		imgSrc := fmt.Sprintf(`<img src="/!%s/sorry`, reqHost)
		body = bytes.Replace(body, []byte(`<img src="/sorry`), []byte(imgSrc), 1)
		rules = nil
	}

	for i, r := range rules {
		if r.PathRe != nil && r.PathRe.FindString(reqPath) == NULL {
			if log.V(4) {
				log.Infof("re.%d=[%s] pathRe=deny", i, r.ContentPattern.Pattern)
			}
			continue
		}
		if log.V(4) {
			log.Infof("re.%d=[%s] applied", i, r.ContentPattern.Pattern)
		}
		if r.Scheme&0xff > 0 {
			body = r.ContentRe.Replace(body, r.Replacement)
		}
		if r.Scheme&0xff00 > 0 {
			bodyExtraHeader += r.InsertHeader
		}
	}

	zw = gzip.NewWriter(w)
	if len(bodyExtraHeader) > 0 {
		zw.Write([]byte(bodyExtraHeader))
	}
	zw.Write(body)
	err = zw.Flush()
	return
}
Example 6: calcFileInfo
func calcFileInfo(fi *FileInfo) {
	fmt.Printf("calcFileInfo: '%s'\n", fi.Path)
	const BufSize = 16 * 1024
	var buf [BufSize]byte
	r, err := os.Open(fi.Path)
	fataliferr(err)
	defer r.Close()

	sha1 := sha1.New()
	md5Hash := md5.New()
	fi.ShouldCompress = false
	tryCompressFirsBlock := shouldTryCompressFile(fi.Path)
	var gzw *gzip.Writer
	compressedData := &bytes.Buffer{}
	fi.Size = 0
	fi.CompressedData = nil

	for {
		n, err := r.Read(buf[:])
		if err == io.EOF {
			break
		}
		d := buf[:n]
		fataliferr(err)
		fatalif(n == 0, "n is 0")
		fi.Size += n
		_, err = sha1.Write(d)
		fataliferr(err)
		_, err = md5Hash.Write(d)
		fataliferr(err)
		if tryCompressFirsBlock {
			tryCompressFirsBlock = false
			gz, err := gzip.NewWriterLevel(compressedData, gzip.BestCompression)
			fataliferr(err)
			_, err = gz.Write(d)
			fataliferr(err)
			gz.Close()
			compressedSize := compressedData.Len()
			saved := n - compressedSize
			// relatively high threshold of 20% savings on compression
			fi.ShouldCompress = saved > 0 && perc(compressedSize, saved) > 20
			diff := n - compressedSize
			fmt.Printf(" should compress: %v, %d => %d (%d %.2f%%)\n", fi.ShouldCompress, n, compressedSize, diff, perc(n, diff))
			if fi.ShouldCompress {
				compressedData = &bytes.Buffer{}
				gzw, err = gzip.NewWriterLevel(compressedData, gzip.BestCompression)
				fataliferr(err)
			}
		}
		if gzw != nil {
			_, err = gzw.Write(d)
			fataliferr(err)
		}
	}

	sha1Sum := sha1.Sum(nil)
	fi.Sha1Hex = fmt.Sprintf("%x", sha1Sum)

	if gzw != nil {
		gzw.Close()
		compressedSize := compressedData.Len()
		// only use compressed if compressed by at least 5%
		if compressedSize+(compressedSize/20) < fi.Size {
			fi.CompressedData = compressedData.Bytes()
		}
	}

	md5Sum := md5Hash.Sum(nil)
	fi.Md5Hex = fmt.Sprintf("%x", md5Sum)
	// if compressed, md5 is of the compressed content
	if fi.CompressedData != nil {
		md5Sum2 := md5.Sum(fi.CompressedData)
		fi.Md5Hex = fmt.Sprintf("%x", md5Sum2[:])
	}

	fi.S3PathSha1Part = sha1HexToS3Path(fi.Sha1Hex)
	ext := strings.ToLower(filepath.Ext(fi.Path))
	if fi.CompressedData != nil {
		fi.S3FullPath = fi.S3PathSha1Part + ".gz" + ext
	} else {
		fi.S3FullPath = fi.S3PathSha1Part + ext
	}

	fmt.Printf(" sha1: %s\n", fi.Sha1Hex)
	fmt.Printf(" md5: %s\n", fi.Md5Hex)
	fmt.Printf(" s3: %s\n", fi.S3FullPath)
	fmt.Printf(" size: %d\n", fi.Size)
	if fi.CompressedData != nil {
		sizedCompressed := len(fi.CompressedData)
		saved := fi.Size - sizedCompressed
		fmt.Printf(" size compressed: %d (saves %d %.2f%%)\n", sizedCompressed, saved, perc(fi.Size, saved))
	}
}