-rw-r--r--  snappy-internal.h  4
-rw-r--r--  snappy.cc          4
2 files changed, 4 insertions, 4 deletions
diff --git a/snappy-internal.h b/snappy-internal.h
index f3da93c..0653dc6 100644
--- a/snappy-internal.h
+++ b/snappy-internal.h
@@ -93,7 +93,7 @@ static inline int FindMatchLength(const char* s1,
// the first non-matching bit and use that to calculate the total
// length of the match.
while (PREDICT_TRUE(s2 <= s2_limit - 8)) {
- if (PREDICT_FALSE(UNALIGNED_LOAD64(s2) == UNALIGNED_LOAD64(s1 + matched))) {
+ if (UNALIGNED_LOAD64(s2) == UNALIGNED_LOAD64(s1 + matched)) {
s2 += 8;
matched += 8;
} else {
@@ -108,7 +108,7 @@ static inline int FindMatchLength(const char* s1,
}
}
while (PREDICT_TRUE(s2 < s2_limit)) {
- if (PREDICT_TRUE(s1[matched] == *s2)) {
+ if (s1[matched] == *s2) {
++s2;
++matched;
} else {
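
The fast path in FindMatchLength compares eight bytes at a time and, on a mismatch, derives the match length from the position of the first differing bit. A minimal sketch of that calculation, assuming a little-endian machine and GCC/Clang's __builtin_ctzll; the helper name below is illustrative, not the routine snappy actually uses:

  #include <cstdint>

  // Hypothetical helper: given two 8-byte words that differ, the XOR marks
  // the differing bits; the index of the lowest set bit, divided by 8, is the
  // number of leading bytes that still match (little-endian byte order).
  static inline int MatchingBytes(uint64_t a, uint64_t b) {
    uint64_t diff = a ^ b;              // non-zero because a != b
    return __builtin_ctzll(diff) >> 3;  // first differing bit -> matching bytes
  }
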
diff --git a/snappy.cc b/snappy.cc
index c73f56b..971adc2 100644
--- a/snappy.cc
+++ b/snappy.cc
@@ -138,7 +138,7 @@ namespace {
const int kMaxIncrementCopyOverflow = 10;
inline void IncrementalCopyFastPath(const char* src, char* op, ssize_t len) {
- while (op - src < 8) {
+ while (PREDICT_FALSE(op - src < 8)) {
UnalignedCopy64(src, op);
len -= op - src;
op += op - src;
@@ -215,7 +215,7 @@ static inline char* EmitCopyLessThan64(char* op, size_t offset, int len) {
static inline char* EmitCopy(char* op, size_t offset, int len) {
// Emit 64 byte copies but make sure to keep at least four bytes reserved
- while (len >= 68) {
+ while (PREDICT_FALSE(len >= 68)) {
op = EmitCopyLessThan64(op, offset, 64);
len -= 64;
}
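
Both hunks in snappy.cc, like the ones in snappy-internal.h, only toggle branch-prediction annotations; the loop bodies are unchanged. PREDICT_TRUE and PREDICT_FALSE are snappy's wrappers around the compiler's branch hint (defined in snappy-stubs-internal.h). A minimal sketch of how such macros are commonly written, assuming GCC/Clang's __builtin_expect and shown for reference rather than as the exact upstream definitions:

  // A hint that a condition is rarely (or usually) true lets the compiler
  // lay out the expected path as the fall-through case.
  #if defined(__GNUC__)
  #define PREDICT_TRUE(x)  (__builtin_expect(!!(x), 1))
  #define PREDICT_FALSE(x) (__builtin_expect(!!(x), 0))
  #else
  #define PREDICT_TRUE(x)  (x)
  #define PREDICT_FALSE(x) (x)
  #endif

With definitions like these, the patch drops the hints on the two comparison branches inside FindMatchLength and instead marks the rarely-entered loops in snappy.cc (op - src < 8 and len >= 68) as unlikely.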